clean code 1.6
parent 29cc3d1422
commit f94d37ebde

@@ -186,7 +186,7 @@ static bool HasSideEffectBackProp(const CNodePtr &cnode) {
   return false;
 }
 
-AnfNodePtr HandleRealToComplex(const AnfNodePtr &input, const CNodePtr &din, FuncGraphPtr fg) {
+AnfNodePtr HandleRealToComplex(const AnfNodePtr &input, const CNodePtr &din, const FuncGraphPtr &fg) {
   MS_EXCEPTION_IF_NULL(input);
   TypePtr input_type = input->Type();
   if (input_type == nullptr || !input_type->isa<TensorType>()) {

@@ -278,7 +278,7 @@ FuncGraphPtr KPrim::BpropToK(const T &primal, const FuncGraphPtr &bprop_fg, cons
 // din: CNodePtr with gradient of input.
 // fg: FuncGraph which input and din belong to.
 // return: New din with inserted real op if necessary.
-AnfNodePtr HandleRealToComplex(const AnfNodePtr &input, const CNodePtr &din, FuncGraphPtr fg);
+AnfNodePtr HandleRealToComplex(const AnfNodePtr &input, const CNodePtr &din, const FuncGraphPtr &fg);
 }  // namespace ad
 }  // namespace mindspore
 
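
Note: both hunks above turn a by-value FuncGraphPtr parameter into a const reference. FuncGraphPtr is a std::shared_ptr alias, so passing it by value bumps the atomic reference count on every call; a const reference avoids that when the callee never takes ownership. A minimal standalone sketch of the difference (the FuncGraph stub and helper names here are illustrative, not from the MindSpore sources):

    #include <memory>

    struct FuncGraph {};
    using FuncGraphPtr = std::shared_ptr<FuncGraph>;

    // By value: the parameter is a copy, so use_count is 2 inside the call.
    long UseCountByValue(FuncGraphPtr fg) { return fg.use_count(); }

    // By const reference: no copy, no refcount traffic; use_count stays 1.
    long UseCountByRef(const FuncGraphPtr &fg) { return fg.use_count(); }

    int main() {
      auto fg = std::make_shared<FuncGraph>();
      return static_cast<int>(UseCountByValue(fg) - UseCountByRef(fg));  // 2 - 1 = 1
    }
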
@@ -97,7 +97,7 @@ FuncGraphPtr Grad(const FuncGraphPtr &func_graph, const opt::OptimizerPtr &optim
   manager_ptr->AddFuncGraph(func_graph);
 
   FuncGraphPtr grad_fg = func_graph;
-  static bool enable_closure = common::GetEnv("MS_DEV_ENABLE_CLOSURE") != "0";
+  static const bool enable_closure = common::GetEnv("MS_DEV_ENABLE_CLOSURE") != "0";
   if (enable_closure) {
     if (func_graph->func_graphs_used().size() != 0 && optimzer->is_first_order_j()) {
       lift_fv_before_grad = true;

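
Note: adding const to the function-local static documents that the MS_DEV_ENABLE_CLOSURE flag is read from the environment once, on first execution, and never mutated afterwards; the compiler then rejects accidental writes. The same pattern in a self-contained sketch (using std::getenv in place of MindSpore's common::GetEnv wrapper, which is an assumption about its behavior):

    #include <cstdlib>
    #include <string>

    bool EnableClosure() {
      // Initialized exactly once (thread-safe since C++11); const forbids later writes.
      static const bool enable_closure = []() {
        const char *env = std::getenv("MS_DEV_ENABLE_CLOSURE");
        return env == nullptr || std::string(env) != "0";
      }();
      return enable_closure;
    }
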
@@ -95,7 +95,7 @@ mindspore::HashSet<std::string> GetSerializableBpropList() {
   auto ops_list = serializable_bprop_ops_attr.cast<py::list>();
   for (auto op : ops_list) {
     if (py::isinstance<py::str>(op)) {
-      serializable_bprop_list.insert(op.cast<std::string>());
+      (void)serializable_bprop_list.insert(op.cast<std::string>());
       continue;
     }
     py::object prim_name = op.attr("__name__");

@@ -103,7 +103,7 @@ mindspore::HashSet<std::string> GetSerializableBpropList() {
       MS_LOG(WARNING) << "The name of obj " << py::str(op) << " to be exported to mindir should be a string";
       continue;
     }
-    serializable_bprop_list.insert(prim_name.cast<std::string>());
+    (void)serializable_bprop_list.insert(prim_name.cast<std::string>());
   }
   return serializable_bprop_list;
 }

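
Note: the (void) casts that dominate this commit follow the lint rule that a discarded return value must be discarded explicitly. Container insert calls report whether the element was actually added, and casting the result to void records that the caller ignores it on purpose. Illustrated with std::set, whose insert has the same pair-returning shape (a sketch, not the MindSpore HashSet):

    #include <set>
    #include <string>

    int main() {
      std::set<std::string> names;
      // insert() returns std::pair<iterator, bool>; the bool says whether a new
      // element was added. Casting to void silences "unused result" warnings and
      // tells readers the result is ignored deliberately.
      (void)names.insert("bprop");
      (void)names.insert("bprop");  // duplicate: would return {it, false}
      return names.size() == 1 ? 0 : 1;
    }
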
@@ -145,7 +145,7 @@ void GetFilesHash(const std::string &dir, mindspore::HashMap<std::string, std::s
     auto real_path = std::string(dir) + "/" + filename->d_name;
     (void)bprop_hash_to_file->emplace(system::sha256::GetHashFromFile(real_path), real_path);
   }
-  closedir(open_dir);
+  (void)closedir(open_dir);
 }
 
 mindspore::HashMap<std::string, std::string> GetAllBpropFileHash() {

@@ -160,7 +160,7 @@ std::vector<AnfNodePtr> GenerateOutputTempGetItems(const FuncGraphPtr &func_grap
     MS_EXCEPTION_IF_NULL(node->abstract());
     if (!node->abstract()->isa<abstract::AbstractTuple>()) {
       if (node != func_graph->output()) {
-        output_tmp_getitems.emplace_back(node);
+        (void)output_tmp_getitems.emplace_back(node);
       }
       continue;
     }

@@ -204,7 +204,7 @@ bool EraseNode(const CNodePtr &cnode, size_t input_idx, const FuncGraphManagerPt
   }
   MS_LOG(WARNING) << "Erase dead node: " << dead_node->DebugString() << ", user: " << cnode->DebugString();
   // Can't use `Replace`, must use `SetEdge`.
-  manager->SetEdge(cnode, input_idx, MakeScalarZero());
+  manager->SetEdge(cnode, SizeToInt(input_idx), MakeScalarZero());
   return true;
 }
 
@@ -335,7 +335,7 @@ std::shared_ptr<HashSet<size_t>> GetUsedParameters(const FuncGraphPtr &func_grap
   for (const auto &parameter : parameters) {
     const auto &node_users_it = manager_node_users.find(parameter);
     if (node_users_it != manager_node_users.end() && !node_users_it->second.empty()) {
-      used_parameter_indexes->insert(index);
+      (void)used_parameter_indexes->insert(index);
     }
     index++;
   }

@@ -452,7 +452,7 @@ bool EraseGraphCaller(const FuncGraphPtr &func_graph, const FuncGraphAnalyzer &a
   // instead of arg user here.
   for (const auto &closure : call_closures) {
     for (size_t i = 0; i < closure->arg_users_.size(); i++) {
-      EraseArg(closure->arg_indexes_[i], closure->arg_users_[i], manager);
+      (void)EraseArg(closure->arg_indexes_[i], closure->arg_users_[i], manager);
     }
   }
   change = true;

@@ -515,8 +515,9 @@ std::shared_ptr<HashMap<std::string, std::vector<AnfNodePtr>>> SearchVisitNodes(
 
 std::shared_ptr<OrderedSet<FuncGraphPtr>> GetAllFuncGraphs(const std::vector<AnfNodePtr> &value_nodes) {
   auto func_graphs = std::make_shared<OrderedSet<FuncGraphPtr>>();
-  std::for_each(value_nodes.begin(), value_nodes.end(),
-                [&func_graphs](const AnfNodePtr &node) { func_graphs->insert(GetValueNode<FuncGraphPtr>(node)); });
+  (void)std::for_each(value_nodes.begin(), value_nodes.end(), [&func_graphs](const AnfNodePtr &node) {
+    func_graphs->insert(GetValueNode<FuncGraphPtr>(node));
+  });
   return func_graphs;
 }
 
@@ -125,7 +125,7 @@ AnfNodePtr GetTransformedKeyNode(const AnfNodePtr &old_key_node, SymbolicKeyConv
   } else {
     static int64_t key_counter = 0;
     transformed_key = ++key_counter;
-    symbolic_key_map.emplace(std::make_pair(symbolic_key_inst, transformed_key));
+    (void)symbolic_key_map.emplace(std::make_pair(symbolic_key_inst, transformed_key));
   }
   auto tensor_key = std::make_shared<mindspore::tensor::Tensor>(transformed_key);
   auto transformed_key_node = NewValueNode(tensor_key);

@@ -152,7 +152,7 @@ bool EnvironConversion(const pipeline::ResourcePtr &resource) {
   SymbolicKeyConversionMap symbolic_key_map;
   static AbstractBasePtr scalar_abs = std::make_shared<abstract::AbstractScalar>(kAnyValue, kInt64);
   static AbstractBasePtr tensor_abs = std::make_shared<abstract::AbstractTensor>(scalar_abs);
-  static std::string attr_name = "value_type";
+  static const std::string attr_name = "value_type";
   const int kPrimitiveOffset = 0;
   const int kEnvironTypeOffset = 1;
   const int kSymbolicKeyOffset = 2;

@@ -171,7 +171,7 @@ bool EnvironConversion(const pipeline::ResourcePtr &resource) {
       const auto &type_id = GetValueType(cnode);
       if (type_id == kObjectTypeMonad) {
         if (IsPrimitiveCNode(node, prim::kPrimEnvironSet)) {
-          txn.Replace(cnode, cnode->input(kEnvironTypeOffset));
+          (void)txn.Replace(cnode, cnode->input(kEnvironTypeOffset));
           continue;
         } else {
           MS_LOG(EXCEPTION) << "Should be eliminated, but node: " << cnode->DebugString();

@@ -559,7 +559,10 @@ bool ConvertSwitchReplacement::CheckSwitchWrapNode(const AnfNodePtr &node) {
 
 void ConvertSwitchReplacement::TransformSwitchBranchReplace(const AnfNodePtr &node) {
   auto cnode = node->cast<CNodePtr>();
-  auto switch_cnode = cnode->input(0)->cast<CNodePtr>();
+  MS_EXCEPTION_IF_NULL(cnode);
+  constexpr size_t input_index = 0;
+  auto switch_cnode = cnode->input(input_index)->cast<CNodePtr>();
   MS_EXCEPTION_IF_NULL(switch_cnode);
   auto cond = switch_cnode->input(kCondIndex);
   auto true_br = switch_cnode->input(kTrueBranchIndex);
   auto false_br = switch_cnode->input(kFalseBranchIndex);

@@ -572,7 +575,7 @@ void ConvertSwitchReplacement::TransformSwitchBranchReplace(const AnfNodePtr &no
   auto trans_g2 = internal::TransformGraphCondFalseBranchNodes(g2, cond);
 
   std::vector<AnfNodePtr> params;
-  if (cnode && cnode->size() > 1) {
+  if (cnode->size() > 1) {
     // There are arguments for the call of switch result,
     // usually these are monad states added by auto-monad.
     for (size_t i = 1; i < cnode->size(); ++i) {

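
Note: the two TransformSwitchBranchReplace hunks above fix an order-of-operations bug. The old code dereferenced cnode via cnode->input(0) before any null check, which made the later `if (cnode && ...)` guard dead weight: by the time it ran, a null cnode would already have crashed. The rewrite checks first, then dereferences, and names the magic index. The shape of the fix in a self-contained sketch (simplified stand-in types, not the real CNode API):

    #include <cstddef>
    #include <stdexcept>
    #include <vector>

    struct CNode {
      std::vector<CNode *> inputs;
      CNode *input(size_t i) const { return inputs.at(i); }
    };

    void Transform(const CNode *cnode) {
      if (cnode == nullptr) {              // check BEFORE the first dereference
        throw std::runtime_error("cnode is null");
      }
      constexpr size_t input_index = 0;    // named constant instead of a bare 0
      const CNode *switch_cnode = cnode->input(input_index);
      if (switch_cnode == nullptr) {
        throw std::runtime_error("switch_cnode is null");
      }
      // ... use switch_cnode; a later `if (cnode && ...)` guard is now dead code.
    }

    int main() {
      CNode root, child;
      root.inputs.push_back(&child);
      Transform(&root);
      return 0;
    }
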
@@ -648,6 +648,7 @@ class AllReduceConstElim : public OptimizerCaller {
       auto prim_cnode = pattern.GetOriginalNode();
+      MS_EXCEPTION_IF_NULL(prim_cnode);
       auto primitive = GetCNodePrimitive(prim_cnode);
       MS_EXCEPTION_IF_NULL(primitive);
       auto reduce_op = primitive->GetAttr("op");
       auto group = primitive->GetAttr("group")->ToString();
       // For sum operation, multiply constant tensor by number of devices

@@ -167,7 +167,7 @@ AnfNodePtr EliminateUpdateStateWithDepend(const CNodePtr &update_state) {
   return input_monad;
 }
 
-bool ExistEnvironGet(FuncGraphManagerPtr manager) {
+bool ExistEnvironGet(const FuncGraphManagerPtr &manager) {
   const FuncGraphSet &fgs = manager->func_graphs();
   for (auto &fg : fgs) {
     auto &nodes = fg->value_nodes();

@@ -198,10 +198,10 @@ AnfNodePtr EliminateUpdateStateMakeTupleWithUselessEnv(const CNodePtr &update_st
   for (size_t i = 1; i < input_size; i++) {
     auto node = make_tuple->input(i);
     if (IsPrimitiveCNode(node, prim::kPrimEnvironSet) && OnlyUsedByOneNode(node, make_tuple)) {
-      env_nodes.emplace_back(node);
+      (void)env_nodes.emplace_back(node);
       has_environ_set = true;
     } else if (node->isa<CNode>() && !IsPrimitiveCNode(node, prim::kPrimUpdateState)) {
-      new_maketuple_inputs.emplace_back(node);
+      (void)new_maketuple_inputs.emplace_back(node);
     }
   }
   if (!has_environ_set) {

@@ -229,8 +229,8 @@ AnfNodePtr EliminateUpdateStateMakeTupleWithUselessEnv(const CNodePtr &update_st
     auto env_input = env_cnode->input(first_index);
     auto attach = env_cnode->input(attach_index);
     if (IsPrimitiveCNode(env_input, prim::kPrimEnvironSet) && OnlyUsedByOneNode(env_input, env_cnode)) {
-      env_nodes.emplace_back(env_input);
-      new_maketuple_inputs.insert(new_maketuple_inputs.begin() + no_env_node_size, attach);
+      (void)env_nodes.emplace_back(env_input);
+      (void)new_maketuple_inputs.insert(new_maketuple_inputs.begin() + no_env_node_size, attach);
     }
   }
   if (new_maketuple_inputs.size() == 1) {

@@ -241,8 +241,9 @@ AnfNodePtr EliminateUpdateStateMakeTupleWithUselessEnv(const CNodePtr &update_st
     return nullptr;
   }
   abstract::AbstractBasePtrList element_abstracts;
-  std::transform(new_maketuple_inputs.begin() + 1, new_maketuple_inputs.end(), std::back_inserter(element_abstracts),
-                 [](const AnfNodePtr &input) { return input->abstract(); });
+  (void)std::transform(new_maketuple_inputs.begin() + 1, new_maketuple_inputs.end(),
+                       std::back_inserter(element_abstracts),
+                       [](const AnfNodePtr &input) { return input->abstract(); });
   auto new_make_tuple = fg->NewCNode(new_maketuple_inputs);
   new_make_tuple->set_abstract(std::make_shared<abstract::AbstractTuple>(element_abstracts));
   auto new_update_state =

@@ -735,9 +736,9 @@ AnfNodePtr UpdatestateUselessNodeEliminater::operator()(const OptimizerPtr &, co
   // UpdateState(u, MakeTuple(Function, input) -> UpdateState(u, input)
   // UpdateState(u, MakeTuple(input, Function) -> UpdateState(u, input)
   if (IsPrimitiveCNode(attach, prim::kPrimMakeTuple)) {
-    auto node = EliminateUpdateStateMakeTupleWithUselessNode(update_state_node, attach->cast<CNodePtr>());
-    if (node != nullptr) {
-      return node;
+    auto new_node = EliminateUpdateStateMakeTupleWithUselessNode(update_state_node, attach->cast<CNodePtr>());
+    if (new_node != nullptr) {
+      return new_node;
     }
     return EliminateUpdateStateMakeTupleWithUselessEnv(update_state_node, attach->cast<CNodePtr>());
   }

@@ -42,7 +42,7 @@ CNodePtr CreateStridedSliceCNode(const parallel::Shape &begin, const parallel::S
   return new_node;
 }
 
-CNodePtr CreateAllGatherCNode(const AnfNodePtr &node, std::string group) {
+CNodePtr CreateAllGatherCNode(const AnfNodePtr &node, const std::string &group) {
   auto op = parallel::CreateAllGatherOp(group);
   auto allgather_input = parallel::CreateInput(op, node, "recompute_slice_allgather");
   auto func_graph = node->func_graph();

@@ -59,7 +59,7 @@ std::vector<parallel::Group> InferRepeatedRankList(const CNodePtr &cnode) {
   auto tensor_layout = output_info[0].tensor_layout();
   auto tensor_map = tensor_layout.origin_tensor_map();
   std::vector<parallel::Group> groups;
-  operator_info->CreateGroupByTensorMap(tensor_map.array(), &groups);
+  (void)operator_info->CreateGroupByTensorMap(tensor_map.array(), &groups);
   return groups;
 }
 
@@ -106,7 +106,7 @@ void InsertSliceAllGatherNode(const std::vector<std::pair<std::shared_ptr<AnfNod
   }
   int64_t global_rank_id = parallel::g_device_manager->global_rank();
   int64_t stage_num = parallel::g_device_manager->stage_num();
-  int64_t device_num = parallel::g_device_manager->DeviceNum();
+  int64_t device_num = SizeToLong(parallel::g_device_manager->DeviceNum());
   int64_t stage_device_num = device_num / stage_num;
   int64_t local_rank_id = global_rank_id % stage_device_num;
   auto groups = InferRepeatedRankList(node);

@@ -120,7 +120,7 @@ void InsertSliceAllGatherNode(const std::vector<std::pair<std::shared_ptr<AnfNod
                  << "The slice would not activate to this node: " << node->DebugString();
     return;
   }
-  int64_t group_deivce_num = group.GetDevNum();
+  int64_t group_deivce_num = SizeToLong(group.GetDevNum());
   std::vector<int64_t> slice_begin(out_shape_element.size(), 0);
   slice_begin[0] = (local_rank_id % group_deivce_num) * (out_shape_element[0] / group_deivce_num);
   std::vector<int64_t> slice_end = out_shape_element;

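
Note: DeviceNum() and GetDevNum() return unsigned sizes, and assigning them straight into int64_t is an implicit signed/unsigned conversion that static checkers flag. Wrapping the call in SizeToLong makes the narrowing explicit and auditable. A rough stand-in with the same intent (assumption: MindSpore's real helper lives in its convert-utils headers and may differ in how it range-checks):

    #include <cstdint>
    #include <limits>
    #include <stdexcept>

    // Illustrative equivalent of MindSpore's SizeToLong helper.
    inline int64_t SizeToLong(size_t v) {
      if (v > static_cast<size_t>(std::numeric_limits<int64_t>::max())) {
        throw std::out_of_range("size_t value does not fit in int64_t");
      }
      return static_cast<int64_t>(v);
    }

    int main() {
      size_t device_count = 8;
      int64_t device_num = SizeToLong(device_count);  // explicit, checked conversion
      return device_num == 8 ? 0 : 1;
    }
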
@@ -142,7 +142,7 @@ void InsertSliceAllGatherNode(const std::vector<std::pair<std::shared_ptr<AnfNod
   if (node->HasPrimalAttr(parallel::MICRO)) {
     allgather_cnode->AddPrimalAttr(parallel::MICRO, node->GetPrimalAttr(parallel::MICRO));
   }
-  manager->Replace(slice_cnode, allgather_cnode);
+  (void)manager->Replace(slice_cnode, allgather_cnode);
   slice_allgathers->push_back(allgather_cnode);
 
   std::vector<AnfNodePtr> depend_inputs{NewValueNode(prim::kPrimDepend), forward_node_user.first, slice_cnode};

@@ -150,7 +150,7 @@ void InsertSliceAllGatherNode(const std::vector<std::pair<std::shared_ptr<AnfNod
   depend_node->set_abstract(forward_node_user.first->abstract()->Clone());
   depend_node->AddAttr("slice_forward_depend", MakeValue(true));
   MS_EXCEPTION_IF_NULL(depend_node);
-  manager->Replace(forward_node_user.first, depend_node);
+  (void)manager->Replace(forward_node_user.first, depend_node);
 }
 
 void InsertAllGatherDepend(const FuncGraphPtr &graph, const std::vector<CNodePtr> &slice_allgathers) {

@@ -183,7 +183,7 @@ void InsertAllGatherDepend(const FuncGraphPtr &graph, const std::vector<CNodePtr
   allgather_depend_node->set_input(1, last_allgather->input(1));
   allgather_depend_node->set_abstract(last_allgather->input(1)->abstract()->Clone());
   allgather_depend_node->AddAttr("last_slice_allgather_depend", MakeValue(true));
-  manager->Replace(allgather_depend_node, last_allgather);
+  (void)manager->Replace(allgather_depend_node, last_allgather);
   manager->SetEdge(last_allgather, 1, allgather_depend_node);
 }
 
@@ -234,7 +234,6 @@ void SliceRecomputedActivationNodes(const FuncGraphPtr &graph) {
         !node->has_user_data<parallel::OperatorInfo>()) {
       continue;
     }
-    auto node_users = manager->node_users()[node];
     std::vector<std::pair<std::shared_ptr<AnfNode>, int>> duplicate_users;
     std::vector<std::pair<std::shared_ptr<AnfNode>, int>> forward_users;
     GroupingNextNodes(node, &duplicate_users, &forward_users);

@@ -86,7 +86,7 @@ void UpdateFuncGraphParameter(const FuncGraphPtr &func_graph) {
   func_graph->set_parameters(new_paras);
 }
 
-bool IsDynamicShapeGraph(FuncGraphPtr func_graph) {
+bool IsDynamicShapeGraph(const FuncGraphPtr &func_graph) {
   MS_EXCEPTION_IF_NULL(func_graph);
   std::vector<AnfNodePtr> node_list = TopoSort(func_graph->get_return());
   return std::any_of(node_list.begin(), node_list.end(),

@@ -1244,7 +1244,7 @@ bool SetMindIRGraphAction(const ResourcePtr &res) {
     (void)AbstractAnalyze(res, res->func_graph(), broaded_args, true);
   } else {
     // Use InferMindir which will find c++ infer in eval_map and backend_eval_map;
-    InferMindir(res->func_graph(), args_spec_list, true);
+    (void)InferMindir(res->func_graph(), args_spec_list, true);
   }
   auto it = abstract::AnalysisResultCacheMgr::GetInstance().begin();
   auto it_end = abstract::AnalysisResultCacheMgr::GetInstance().end();