fix clean code warning

hbhu_bin 2022-09-21 14:21:13 +08:00
parent 7102e94755
commit e472da5e98
8 changed files with 34 additions and 32 deletions

View File

@@ -246,7 +246,7 @@ void ReplaceLoadUserMakeTuple(const FuncGraphManagerPtr &manager, const CNodePtr
manager->Replace(make_tuple, new_make_tuple);
}
-bool ReplaceLoadUser(const FuncGraphManagerPtr &manager, const FuncGraphPtr &fg, const AnfNodePtr &load) {
+bool ReplaceLoadUser(const FuncGraphManagerPtr &manager, const AnfNodePtr &load) {
bool change = false;
auto load_users = manager->node_users()[load];
for (const auto &load_user : load_users) {
@@ -281,15 +281,15 @@ bool ReplaceLoadUser(const FuncGraphManagerPtr &manager, const FuncGraphPtr &fg,
return change;
}
-bool ReplaceSameGroupLoad(const FuncGraphManagerPtr &manager, const FuncGraphPtr &fg,
-const std::vector<AnfNodePtr> &toposet, const std::vector<size_t> &group) {
+bool ReplaceSameGroupLoad(const FuncGraphManagerPtr &manager, const std::vector<AnfNodePtr> &toposet,
+const std::vector<size_t> &group) {
if (group.size() <= 1) {
return false;
}
bool change = false;
const auto &main = toposet[group[0]];
for (size_t i = 1; i < group.size(); i++) {
-change = ReplaceLoadUser(manager, fg, toposet[group[i]]);
+change = ReplaceLoadUser(manager, toposet[group[i]]);
manager->Replace(toposet[group[i]], main);
}
return change;
@@ -389,7 +389,7 @@ bool AutoMonadEliminator::ReplaceAutoMonadNode(const FuncGraphManagerPtr &manage
(void)need_merge_loads.insert(need_merge_loads.cend(), groups.cbegin(), groups.cend());
}
for (auto &group : need_merge_loads) {
-bool replaced = ReplaceSameGroupLoad(manager, fg, toposet, group);
+bool replaced = ReplaceSameGroupLoad(manager, toposet, group);
if (replaced) {
changed = true;
}
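
The changes above drop the FuncGraphPtr fg parameter from ReplaceLoadUser and ReplaceSameGroupLoad and update their call sites, because the function bodies no longer read it; that is exactly what an unused-parameter clean-code check reports. A minimal self-contained sketch of the same kind of fix, using invented names (Accumulate, scale_hint) rather than MindSpore code:

#include <numeric>
#include <vector>

// Before: 'scale_hint' was accepted but never used, so static analysis
// flagged an unused parameter at the declaration and every call site.
// int Accumulate(const std::vector<int> &values, int scale_hint) {
//   return std::accumulate(values.begin(), values.end(), 0);
// }

// After: the dead parameter is removed and all callers are updated,
// mirroring how ReplaceLoadUser and ReplaceSameGroupLoad dropped 'fg'.
int Accumulate(const std::vector<int> &values) {
  return std::accumulate(values.begin(), values.end(), 0);
}

int main() { return Accumulate({1, 2, 3}) == 6 ? 0 : 1; }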

View File

@@ -48,7 +48,7 @@ std::vector<GeTensorPtr> ConvertInputTensors(const std::vector<MeTensorPtr> &me_
std::vector<MeTensorPtr> ConvertGeTensors(const std::vector<GeTensorPtr> &ge_tensors);
GeDataType ConvertDataType(const MeDataType &type);
-MeTensorPtr ConvertGeTensor(GeTensorPtr ge_tensor, const ShapeVector &request_dims);
+MeTensorPtr ConvertGeTensor(const GeTensorPtr &ge_tensor, const ShapeVector &request_dims);
MeTensorPtr ConvertGeTensor(const GeTensorPtr &tensor);
MeTensorPtr ConvertGeTensor(const GeTensorPtr &tensor, const TypeId &me_type);
@@ -68,17 +68,17 @@ FuncGraphPtr GetAnfGraph(uint32_t graph_id);
// convert
BACKEND_EXPORT DfGraphConvertorPtr NewConverter(const FuncGraphPtr &graph);
-BACKEND_EXPORT void SetTraining(DfGraphConvertorPtr converter, bool training);
-BACKEND_EXPORT void BuildGraph(DfGraphConvertorPtr converter,
+BACKEND_EXPORT void SetTraining(const DfGraphConvertorPtr &converter, bool training);
+BACKEND_EXPORT void BuildGraph(const DfGraphConvertorPtr &converter,
const std::map<std::string, std::shared_ptr<tensor::Tensor>> &maps);
-void GenerateBroadcastGraph(DfGraphConvertorPtr converter, const TensorOrderMap &tensors);
-BACKEND_EXPORT void GenerateCheckpointGraph(DfGraphConvertorPtr converter);
-BACKEND_EXPORT int ErrCode(DfGraphConvertorPtr converter);
+void GenerateBroadcastGraph(const DfGraphConvertorPtr &converter, const TensorOrderMap &tensors);
+BACKEND_EXPORT void GenerateCheckpointGraph(const DfGraphConvertorPtr &converter);
+BACKEND_EXPORT int ErrCode(const DfGraphConvertorPtr &converter);
-BACKEND_EXPORT DfGraphPtr GetComputeGraph(DfGraphConvertorPtr converter);
-BACKEND_EXPORT DfGraphPtr GetInitGraph(DfGraphConvertorPtr converter);
-BACKEND_EXPORT DfGraphPtr GetSaveCheckpointGraph(DfGraphConvertorPtr converter);
-BACKEND_EXPORT DfGraphPtr GetBroadcastGraph(DfGraphConvertorPtr converter);
+BACKEND_EXPORT DfGraphPtr GetComputeGraph(const DfGraphConvertorPtr &converter);
+BACKEND_EXPORT DfGraphPtr GetInitGraph(const DfGraphConvertorPtr &converter);
+BACKEND_EXPORT DfGraphPtr GetSaveCheckpointGraph(const DfGraphConvertorPtr &converter);
+BACKEND_EXPORT DfGraphPtr GetBroadcastGraph(const DfGraphConvertorPtr &converter);
// new session
BACKEND_EXPORT std::shared_ptr<ge::Session> NewSession(const SessionOptions &sess_options);
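
Every declaration rewritten above takes DfGraphConvertorPtr, which is a shared-pointer alias; passing it by value copies the smart pointer and atomically bumps its reference count on each call, something clean-code checkers typically flag when the callee does not need to share ownership. A small sketch of the by-value versus by-const-reference distinction, with a hypothetical Converter type standing in for DfGraphConvertor:

#include <memory>

struct Converter {
  int err_code = 0;
};
using ConverterPtr = std::shared_ptr<Converter>;

// Pass-by-value copies the shared_ptr, which atomically increments and
// later decrements the reference count on every call.
int ErrCodeByValue(ConverterPtr converter) { return converter->err_code; }

// Pass-by-const-reference observes the same object without touching the
// reference count, matching the ErrCode/Get*Graph signatures above.
int ErrCodeByRef(const ConverterPtr &converter) { return converter->err_code; }

int main() {
  auto converter = std::make_shared<Converter>();
  return ErrCodeByValue(converter) == ErrCodeByRef(converter) ? 0 : 1;
}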

View File

@@ -438,7 +438,7 @@ class OrderEnforcer {
std::vector<CNodePtr> GetSpecialLoads(const RefLoads &loads_map1, const RefLoads &loads_map2,
const RefLoads &loads_map3, const RefLoads &loads_map4,
-const std::set<CNodePtr> &call_nodes) {
+const std::set<CNodePtr> &call_nodes) const {
std::vector<CNodePtr> need_insert_loads;
for (auto &refkey_load : loads_map1) {
auto &loads = refkey_load.second;
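
GetSpecialLoads only reads from its arguments and the enclosing object, so it can be declared const; the same readability rule drives the const qualifiers added to AscendTdtQueue::Translate and AscendHostQueue::CreateDataItemInfos in the files below. A self-contained sketch of the pattern with an invented LoadCollector class, not MindSpore code:

#include <string>
#include <vector>

class LoadCollector {
 public:
  // A member function that never mutates the object should be marked
  // 'const' so it can be called on const instances and so the compiler
  // enforces the read-only contract.
  std::vector<std::string> Filter(const std::vector<std::string> &names) const {
    std::vector<std::string> kept;
    for (const auto &name : names) {
      if (name.rfind(prefix_, 0) == 0) {  // keep names starting with prefix_
        kept.push_back(name);
      }
    }
    return kept;
  }

 private:
  std::string prefix_ = "Load";
};

int main() {
  const LoadCollector collector{};
  return collector.Filter({"Load1", "Other"}).size() == 1 ? 0 : 1;
}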

View File

@@ -269,7 +269,7 @@ void AscendTdtQueue::ParseType(aclDataType acl_data_type, std::string *data_type
*data_type = type_iter->second;
}
-bool AscendTdtQueue::Translate(const std::vector<DataQueueItem> &data, acltdtDataset **output_acl_dataset) {
+bool AscendTdtQueue::Translate(const std::vector<DataQueueItem> &data, acltdtDataset **output_acl_dataset) const {
auto acl_dataset = acltdtCreateDataset();
if (acl_dataset == nullptr) {
MS_LOG(ERROR) << "Create tdt dataset failed.";
@@ -497,7 +497,8 @@ bool AscendHostQueue::EnqueueData(void *buff, bool *need_resend) {
return true;
}
-bool AscendHostQueue::CreateDataItemInfos(const std::vector<DataQueueItem> &data, std::vector<DataItemInfo> *items) {
+bool AscendHostQueue::CreateDataItemInfos(const std::vector<DataQueueItem> &data,
+std::vector<DataItemInfo> *items) const {
MS_EXCEPTION_IF_NULL(items);
if (data.empty()) {
items->emplace_back(BuildDataItemInfo(ACL_TENSOR_DATA_END_OF_SEQUENCE, ACL_BOOL, nullptr, 0UL, nullptr, 0UL));

View File

@@ -89,7 +89,7 @@ class AscendTdtQueue : public DataQueue {
void DestroyAclDataset(acltdtDataset *acl_dataset, bool include_data_item = true) const;
bool AssembleTensor2AclDataset(const std::vector<DataQueueItem> &data, acltdtDataset *acl_dataset) const;
void ParseType(aclDataType acl_data_type, std::string *data_type) const;
-bool Translate(const std::vector<DataQueueItem> &data, acltdtDataset **output_acl_dataset);
+bool Translate(const std::vector<DataQueueItem> &data, acltdtDataset **output_acl_dataset) const;
std::shared_ptr<BlockingQueue> wingman_queue_;
acltdtChannelHandle *acl_handle_;
@@ -126,7 +126,7 @@ class AscendHostQueue : public DataQueue {
bool SetTransId4MBuf(void **buff);
bool LaunchTensor2MBuff(const std::vector<DataQueueItem> &data, void **buff);
bool EnqueueData(void *buff, bool *need_resend);
-bool CreateDataItemInfos(const std::vector<DataQueueItem> &data, std::vector<DataItemInfo> *items);
+bool CreateDataItemInfos(const std::vector<DataQueueItem> &data, std::vector<DataItemInfo> *items) const;
bool SerializeDataItemInfos(std::vector<DataItemInfo> *items, void **buff) const;
DataItemInfo BuildDataItemInfo(acltdtTensorType acl_data_type, int32_t tensor_type, const int64_t *dims,
size_t dim_size, void *data_ptr, uint64_t data_len) const;

View File

@@ -412,7 +412,7 @@ bool AscendKernelRuntime::Init() {
}
} catch (const std::exception &e) {
if (init_device) {
-ResetDevice(device_id_);
+(void)ResetDevice(device_id_);
}
MS_LOG(EXCEPTION) << "Ascend kernel runtime initialization failed." << GetErrorMessage(true)
<< "#dmsg#Framework Error Message:#dmsg#" << e.what();

View File

@@ -179,7 +179,7 @@ std::vector<MeTensorPtr> ConvertGeTensors(const std::vector<GeTensorPtr> &ge_ten
GeDataType ConvertDataType(const MeDataType &type) { return TransformUtil::ConvertDataType(type); }
-MeTensorPtr ConvertGeTensor(GeTensorPtr ge_tensor, const ShapeVector &request_dims) {
+MeTensorPtr ConvertGeTensor(const GeTensorPtr &ge_tensor, const ShapeVector &request_dims) {
return TransformUtil::ConvertGeTensor(ge_tensor, request_dims);
}
@@ -222,42 +222,43 @@ DfGraphConvertorPtr NewConverter(const FuncGraphPtr &graph) {
return converter;
}
-void SetTraining(DfGraphConvertorPtr converter, bool training) {
+void SetTraining(const DfGraphConvertorPtr &converter, bool training) {
MS_EXCEPTION_IF_NULL(converter);
converter->set_training(training);
}
-void BuildGraph(DfGraphConvertorPtr converter, const std::map<std::string, std::shared_ptr<tensor::Tensor>> &maps) {
+void BuildGraph(const DfGraphConvertorPtr &converter,
+const std::map<std::string, std::shared_ptr<tensor::Tensor>> &maps) {
MS_EXCEPTION_IF_NULL(converter);
(void)converter->ConvertAllNode().InitParam(maps).BuildGraph();
}
-void GenerateBroadcastGraph(DfGraphConvertorPtr converter, const TensorOrderMap &tensors) {
+void GenerateBroadcastGraph(const DfGraphConvertorPtr &converter, const TensorOrderMap &tensors) {
MS_EXCEPTION_IF_NULL(converter);
(void)converter->GenerateBroadcastGraph(tensors);
}
-void GenerateCheckpointGraph(DfGraphConvertorPtr converter) {
+void GenerateCheckpointGraph(const DfGraphConvertorPtr &converter) {
MS_EXCEPTION_IF_NULL(converter);
(void)converter->GenerateCheckpointGraph();
}
-int ErrCode(DfGraphConvertorPtr converter) {
+int ErrCode(const DfGraphConvertorPtr &converter) {
MS_EXCEPTION_IF_NULL(converter);
return converter->ErrCode();
}
-DfGraphPtr GetComputeGraph(DfGraphConvertorPtr converter) {
+DfGraphPtr GetComputeGraph(const DfGraphConvertorPtr &converter) {
MS_EXCEPTION_IF_NULL(converter);
return converter->GetComputeGraph();
}
-DfGraphPtr GetInitGraph(DfGraphConvertorPtr converter) {
+DfGraphPtr GetInitGraph(const DfGraphConvertorPtr &converter) {
MS_EXCEPTION_IF_NULL(converter);
return converter->GetInitGraph();
}
-DfGraphPtr GetSaveCheckpointGraph(DfGraphConvertorPtr converter) {
+DfGraphPtr GetSaveCheckpointGraph(const DfGraphConvertorPtr &converter) {
MS_EXCEPTION_IF_NULL(converter);
return converter->GetSaveCheckpointGraph();
}
-DfGraphPtr GetBroadcastGraph(DfGraphConvertorPtr converter) {
+DfGraphPtr GetBroadcastGraph(const DfGraphConvertorPtr &converter) {
MS_EXCEPTION_IF_NULL(converter);
return converter->GetBroadcastGraph();
}

View File

@@ -41,7 +41,7 @@ abstract::ShapePtr GatherNdInferShape(const PrimitivePtr &primitive, const std::
output_shape.push_back(indices_shape[i]);
}
-if ((UlongToLong(indices_rank) - 1) >= 0 && indices_shape[UlongToLong(indices_rank) - 1] < 0) {
+if (indices_rank >= 1 && indices_shape[indices_rank - 1] < 0) {
return std::make_shared<abstract::Shape>(std::vector<int64_t>{UNKNOWN_RANK});
}
for (size_t i = LongToSize(indices_shape[indices_rank - 1]); i < input_rank; ++i) {
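
indices_rank is an unsigned value (hence the UlongToLong conversion in the old code), so the signed round-trip and the "- 1 >= 0" comparison were redundant; checking indices_rank >= 1 before indexing the last dimension expresses the same bound directly. A standalone sketch of the simplified guard, with illustrative names only:

#include <cstdint>
#include <vector>

// Returns true when the last dimension of 'shape' is dynamic (negative).
// 'rank' is unsigned, so 'rank >= 1' is the overflow-safe guard before
// indexing, replacing the old signed round-trip '(long)rank - 1 >= 0'.
bool LastDimIsDynamic(const std::vector<int64_t> &shape, size_t rank) {
  return rank >= 1 && shape[rank - 1] < 0;
}

int main() {
  const bool dynamic_case = LastDimIsDynamic({2, -1}, 2);  // true
  const bool static_case = LastDimIsDynamic({2, 3}, 2);    // false
  return (dynamic_case && !static_case) ? 0 : 1;
}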