From e472da5e98ef19d0ffdcf7b41b2d343a6b5ebc1e Mon Sep 17 00:00:00 2001
From: hbhu_bin
Date: Wed, 21 Sep 2022 14:21:13 +0800
Subject: [PATCH] fix clean code warning

---
 .../optimizer/auto_monad_eliminate.cc         | 10 ++++-----
 .../ccsrc/include/transform/graph_ir/utils.h  | 20 +++++++++---------
 .../jit/static_analysis/order_enforce.cc      |  2 +-
 .../ascend/hal/device/ascend_data_queue.cc    |  5 +++--
 .../ascend/hal/device/ascend_data_queue.h     |  4 ++--
 .../hal/device/ascend_kernel_runtime.cc       |  2 +-
 mindspore/ccsrc/transform/graph_ir/utils.cc   | 21 ++++++++++---------
 mindspore/core/ops/gather_nd.cc               |  2 +-
 8 files changed, 34 insertions(+), 32 deletions(-)

diff --git a/mindspore/ccsrc/frontend/optimizer/auto_monad_eliminate.cc b/mindspore/ccsrc/frontend/optimizer/auto_monad_eliminate.cc
index 97b577fa0f7..e928c3abce7 100644
--- a/mindspore/ccsrc/frontend/optimizer/auto_monad_eliminate.cc
+++ b/mindspore/ccsrc/frontend/optimizer/auto_monad_eliminate.cc
@@ -246,7 +246,7 @@ void ReplaceLoadUserMakeTuple(const FuncGraphManagerPtr &manager, const CNodePtr
   manager->Replace(make_tuple, new_make_tuple);
 }
 
-bool ReplaceLoadUser(const FuncGraphManagerPtr &manager, const FuncGraphPtr &fg, const AnfNodePtr &load) {
+bool ReplaceLoadUser(const FuncGraphManagerPtr &manager, const AnfNodePtr &load) {
   bool change = false;
   auto load_users = manager->node_users()[load];
   for (const auto &load_user : load_users) {
@@ -281,15 +281,15 @@ bool ReplaceLoadUser(const FuncGraphManagerPtr &manager, const FuncGraphPtr &fg,
   return change;
 }
 
-bool ReplaceSameGroupLoad(const FuncGraphManagerPtr &manager, const FuncGraphPtr &fg,
-                          const std::vector &toposet, const std::vector &group) {
+bool ReplaceSameGroupLoad(const FuncGraphManagerPtr &manager, const std::vector &toposet,
+                          const std::vector &group) {
   if (group.size() <= 1) {
     return false;
   }
   bool change = false;
   const auto &main = toposet[group[0]];
   for (size_t i = 1; i < group.size(); i++) {
-    change = ReplaceLoadUser(manager, fg, toposet[group[i]]);
+    change = ReplaceLoadUser(manager, toposet[group[i]]);
     manager->Replace(toposet[group[i]], main);
   }
   return change;
@@ -389,7 +389,7 @@ bool AutoMonadEliminator::ReplaceAutoMonadNode(const FuncGraphManagerPtr &manage
     (void)need_merge_loads.insert(need_merge_loads.cend(), groups.cbegin(), groups.cend());
   }
   for (auto &group : need_merge_loads) {
-    bool replaced = ReplaceSameGroupLoad(manager, fg, toposet, group);
+    bool replaced = ReplaceSameGroupLoad(manager, toposet, group);
     if (replaced) {
       changed = true;
     }
diff --git a/mindspore/ccsrc/include/transform/graph_ir/utils.h b/mindspore/ccsrc/include/transform/graph_ir/utils.h
index dafd8f1246a..3014f91dadd 100644
--- a/mindspore/ccsrc/include/transform/graph_ir/utils.h
+++ b/mindspore/ccsrc/include/transform/graph_ir/utils.h
@@ -48,7 +48,7 @@ std::vector ConvertInputTensors(const std::vector &me_
 std::vector ConvertGeTensors(const std::vector &ge_tensors);
 GeDataType ConvertDataType(const MeDataType &type);
 
-MeTensorPtr ConvertGeTensor(GeTensorPtr ge_tensor, const ShapeVector &request_dims);
+MeTensorPtr ConvertGeTensor(const GeTensorPtr &ge_tensor, const ShapeVector &request_dims);
 
 MeTensorPtr ConvertGeTensor(const GeTensorPtr &tensor);
 MeTensorPtr ConvertGeTensor(const GeTensorPtr &tensor, const TypeId &me_type);
@@ -68,17 +68,17 @@ FuncGraphPtr GetAnfGraph(uint32_t graph_id);
 // convert
 BACKEND_EXPORT DfGraphConvertorPtr NewConverter(const FuncGraphPtr &graph);
 
-BACKEND_EXPORT void SetTraining(DfGraphConvertorPtr converter, bool training);
-BACKEND_EXPORT void BuildGraph(DfGraphConvertorPtr converter,
+BACKEND_EXPORT void SetTraining(const DfGraphConvertorPtr &converter, bool training);
+BACKEND_EXPORT void BuildGraph(const DfGraphConvertorPtr &converter,
                                const std::map> &maps);
-void GenerateBroadcastGraph(DfGraphConvertorPtr converter, const TensorOrderMap &tensors);
-BACKEND_EXPORT void GenerateCheckpointGraph(DfGraphConvertorPtr converter);
-BACKEND_EXPORT int ErrCode(DfGraphConvertorPtr converter);
+void GenerateBroadcastGraph(const DfGraphConvertorPtr &converter, const TensorOrderMap &tensors);
+BACKEND_EXPORT void GenerateCheckpointGraph(const DfGraphConvertorPtr &converter);
+BACKEND_EXPORT int ErrCode(const DfGraphConvertorPtr &converter);
 
-BACKEND_EXPORT DfGraphPtr GetComputeGraph(DfGraphConvertorPtr converter);
-BACKEND_EXPORT DfGraphPtr GetInitGraph(DfGraphConvertorPtr converter);
-BACKEND_EXPORT DfGraphPtr GetSaveCheckpointGraph(DfGraphConvertorPtr converter);
-BACKEND_EXPORT DfGraphPtr GetBroadcastGraph(DfGraphConvertorPtr converter);
+BACKEND_EXPORT DfGraphPtr GetComputeGraph(const DfGraphConvertorPtr &converter);
+BACKEND_EXPORT DfGraphPtr GetInitGraph(const DfGraphConvertorPtr &converter);
+BACKEND_EXPORT DfGraphPtr GetSaveCheckpointGraph(const DfGraphConvertorPtr &converter);
+BACKEND_EXPORT DfGraphPtr GetBroadcastGraph(const DfGraphConvertorPtr &converter);
 
 // new session
 BACKEND_EXPORT std::shared_ptr NewSession(const SessionOptions &sess_options);
diff --git a/mindspore/ccsrc/pipeline/jit/static_analysis/order_enforce.cc b/mindspore/ccsrc/pipeline/jit/static_analysis/order_enforce.cc
index f37c0b8a774..d35778f0af2 100644
--- a/mindspore/ccsrc/pipeline/jit/static_analysis/order_enforce.cc
+++ b/mindspore/ccsrc/pipeline/jit/static_analysis/order_enforce.cc
@@ -438,7 +438,7 @@ class OrderEnforcer {
 
   std::vector GetSpecialLoads(const RefLoads &loads_map1, const RefLoads &loads_map2,
                               const RefLoads &loads_map3, const RefLoads &loads_map4,
-                              const std::set &call_nodes) {
+                              const std::set &call_nodes) const {
     std::vector need_insert_loads;
     for (auto &refkey_load : loads_map1) {
       auto &loads = refkey_load.second;
diff --git a/mindspore/ccsrc/plugin/device/ascend/hal/device/ascend_data_queue.cc b/mindspore/ccsrc/plugin/device/ascend/hal/device/ascend_data_queue.cc
index 65909c0136b..66a759d55bb 100644
--- a/mindspore/ccsrc/plugin/device/ascend/hal/device/ascend_data_queue.cc
+++ b/mindspore/ccsrc/plugin/device/ascend/hal/device/ascend_data_queue.cc
@@ -269,7 +269,7 @@ void AscendTdtQueue::ParseType(aclDataType acl_data_type, std::string *data_type
   *data_type = type_iter->second;
 }
 
-bool AscendTdtQueue::Translate(const std::vector &data, acltdtDataset **output_acl_dataset) {
+bool AscendTdtQueue::Translate(const std::vector &data, acltdtDataset **output_acl_dataset) const {
   auto acl_dataset = acltdtCreateDataset();
   if (acl_dataset == nullptr) {
     MS_LOG(ERROR) << "Create tdt dataset failed.";
@@ -497,7 +497,8 @@ bool AscendHostQueue::EnqueueData(void *buff, bool *need_resend) {
   return true;
 }
 
-bool AscendHostQueue::CreateDataItemInfos(const std::vector &data, std::vector *items) {
+bool AscendHostQueue::CreateDataItemInfos(const std::vector &data,
+                                          std::vector *items) const {
   MS_EXCEPTION_IF_NULL(items);
   if (data.empty()) {
     items->emplace_back(BuildDataItemInfo(ACL_TENSOR_DATA_END_OF_SEQUENCE, ACL_BOOL, nullptr, 0UL, nullptr, 0UL));
diff --git a/mindspore/ccsrc/plugin/device/ascend/hal/device/ascend_data_queue.h b/mindspore/ccsrc/plugin/device/ascend/hal/device/ascend_data_queue.h
index 6ea5403239b..e9c56bd58ee 100644
--- a/mindspore/ccsrc/plugin/device/ascend/hal/device/ascend_data_queue.h
+++ b/mindspore/ccsrc/plugin/device/ascend/hal/device/ascend_data_queue.h
@@ -89,7 +89,7 @@ class AscendTdtQueue : public DataQueue {
   void DestroyAclDataset(acltdtDataset *acl_dataset, bool include_data_item = true) const;
   bool AssembleTensor2AclDataset(const std::vector &data, acltdtDataset *acl_dataset) const;
   void ParseType(aclDataType acl_data_type, std::string *data_type) const;
-  bool Translate(const std::vector &data, acltdtDataset **output_acl_dataset);
+  bool Translate(const std::vector &data, acltdtDataset **output_acl_dataset) const;
 
   std::shared_ptr wingman_queue_;
   acltdtChannelHandle *acl_handle_;
@@ -126,7 +126,7 @@ class AscendHostQueue : public DataQueue {
   bool SetTransId4MBuf(void **buff);
   bool LaunchTensor2MBuff(const std::vector &data, void **buff);
   bool EnqueueData(void *buff, bool *need_resend);
-  bool CreateDataItemInfos(const std::vector &data, std::vector *items);
+  bool CreateDataItemInfos(const std::vector &data, std::vector *items) const;
   bool SerializeDataItemInfos(std::vector *items, void **buff) const;
   DataItemInfo BuildDataItemInfo(acltdtTensorType acl_data_type, int32_t tensor_type, const int64_t *dims,
                                  size_t dim_size, void *data_ptr, uint64_t data_len) const;
diff --git a/mindspore/ccsrc/plugin/device/ascend/hal/device/ascend_kernel_runtime.cc b/mindspore/ccsrc/plugin/device/ascend/hal/device/ascend_kernel_runtime.cc
index 3534e920815..e20c9312082 100644
--- a/mindspore/ccsrc/plugin/device/ascend/hal/device/ascend_kernel_runtime.cc
+++ b/mindspore/ccsrc/plugin/device/ascend/hal/device/ascend_kernel_runtime.cc
@@ -412,7 +412,7 @@ bool AscendKernelRuntime::Init() {
     }
   } catch (const std::exception &e) {
     if (init_device) {
-      ResetDevice(device_id_);
+      (void)ResetDevice(device_id_);
    }
     MS_LOG(EXCEPTION) << "Ascend kernel runtime initialization failed."
                       << GetErrorMessage(true) << "#dmsg#Framework Error Message:#dmsg#" << e.what();
diff --git a/mindspore/ccsrc/transform/graph_ir/utils.cc b/mindspore/ccsrc/transform/graph_ir/utils.cc
index 1d4df4594d2..042b6c57b7d 100644
--- a/mindspore/ccsrc/transform/graph_ir/utils.cc
+++ b/mindspore/ccsrc/transform/graph_ir/utils.cc
@@ -179,7 +179,7 @@ std::vector ConvertGeTensors(const std::vector &ge_ten
 
 GeDataType ConvertDataType(const MeDataType &type) { return TransformUtil::ConvertDataType(type); }
 
-MeTensorPtr ConvertGeTensor(GeTensorPtr ge_tensor, const ShapeVector &request_dims) {
+MeTensorPtr ConvertGeTensor(const GeTensorPtr &ge_tensor, const ShapeVector &request_dims) {
   return TransformUtil::ConvertGeTensor(ge_tensor, request_dims);
 }
 
@@ -222,42 +222,43 @@ DfGraphConvertorPtr NewConverter(const FuncGraphPtr &graph) {
   return converter;
 }
 
-void SetTraining(DfGraphConvertorPtr converter, bool training) {
+void SetTraining(const DfGraphConvertorPtr &converter, bool training) {
   MS_EXCEPTION_IF_NULL(converter);
   converter->set_training(training);
 }
 
-void BuildGraph(DfGraphConvertorPtr converter, const std::map> &maps) {
+void BuildGraph(const DfGraphConvertorPtr &converter,
+                const std::map> &maps) {
   MS_EXCEPTION_IF_NULL(converter);
   (void)converter->ConvertAllNode().InitParam(maps).BuildGraph();
 }
 
-void GenerateBroadcastGraph(DfGraphConvertorPtr converter, const TensorOrderMap &tensors) {
+void GenerateBroadcastGraph(const DfGraphConvertorPtr &converter, const TensorOrderMap &tensors) {
   MS_EXCEPTION_IF_NULL(converter);
   (void)converter->GenerateBroadcastGraph(tensors);
 }
 
-void GenerateCheckpointGraph(DfGraphConvertorPtr converter) {
+void GenerateCheckpointGraph(const DfGraphConvertorPtr &converter) {
   MS_EXCEPTION_IF_NULL(converter);
   (void)converter->GenerateCheckpointGraph();
 }
 
-int ErrCode(DfGraphConvertorPtr converter) {
+int ErrCode(const DfGraphConvertorPtr &converter) {
   MS_EXCEPTION_IF_NULL(converter);
   return converter->ErrCode();
 }
 
-DfGraphPtr GetComputeGraph(DfGraphConvertorPtr converter) {
+DfGraphPtr GetComputeGraph(const DfGraphConvertorPtr &converter) {
   MS_EXCEPTION_IF_NULL(converter);
   return converter->GetComputeGraph();
 }
 
-DfGraphPtr GetInitGraph(DfGraphConvertorPtr converter) {
+DfGraphPtr GetInitGraph(const DfGraphConvertorPtr &converter) {
   MS_EXCEPTION_IF_NULL(converter);
   return converter->GetInitGraph();
 }
 
-DfGraphPtr GetSaveCheckpointGraph(DfGraphConvertorPtr converter) {
+DfGraphPtr GetSaveCheckpointGraph(const DfGraphConvertorPtr &converter) {
   MS_EXCEPTION_IF_NULL(converter);
   return converter->GetSaveCheckpointGraph();
 }
 
-DfGraphPtr GetBroadcastGraph(DfGraphConvertorPtr converter) {
+DfGraphPtr GetBroadcastGraph(const DfGraphConvertorPtr &converter) {
   MS_EXCEPTION_IF_NULL(converter);
   return converter->GetBroadcastGraph();
 }
diff --git a/mindspore/core/ops/gather_nd.cc b/mindspore/core/ops/gather_nd.cc
index a2258c7f9b0..c9735158016 100644
--- a/mindspore/core/ops/gather_nd.cc
+++ b/mindspore/core/ops/gather_nd.cc
@@ -41,7 +41,7 @@ abstract::ShapePtr GatherNdInferShape(const PrimitivePtr &primitive, const std::
     output_shape.push_back(indices_shape[i]);
   }
 
-  if ((UlongToLong(indices_rank) - 1) >= 0 && indices_shape[UlongToLong(indices_rank) - 1] < 0) {
+  if (indices_rank >= 1 && indices_shape[indices_rank - 1] < 0) {
     return std::make_shared(std::vector{UNKNOWN_RANK});
   }
   for (size_t i = LongToSize(indices_shape[indices_rank - 1]); i < input_rank; ++i) {