forked from mindspore-Ecosystem/mindspore

clean CXX warning

commit 8a9b388ac3
parent 438cc78623
@@ -566,7 +566,7 @@ void DumpSubgraph(const OrderedMap<FuncGraphPtr, std::shared_ptr<SubGraphIRInfo>
 void GetEnvDumpIrLineLevel(LocDumpMode *dump_location) {
   static std::unordered_map<std::string, enum LocDumpMode> dump_level_map = {
     {std::to_string(kOff), kOff}, {std::to_string(kTopStack), kTopStack}, {std::to_string(kWholeStack), kWholeStack}};
-  static auto dump_level_in_env = common::GetEnv("ENV_DUMP_IR_LINE_LEVEL");
+  static const auto dump_level_in_env = common::GetEnv("ENV_DUMP_IR_LINE_LEVEL");
   auto it = dump_level_map.find(dump_level_in_env);
   if (it == dump_level_map.end()) {
     return;
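The hunk above only adds const: the environment string is read once into a static local and never modified afterwards, so the checker wants it declared immutable. A standalone sketch of the same caching pattern, using standard std::getenv rather than MindSpore's common::GetEnv (the function and variable names here are illustrative only):

#include <cstdlib>
#include <string>

// Read an environment variable once and keep the cached copy immutable.
const std::string &GetDumpIrLineLevel() {
  static const std::string level = [] {
    const char *raw = std::getenv("ENV_DUMP_IR_LINE_LEVEL");
    return raw == nullptr ? std::string() : std::string(raw);
  }();
  return level;
}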
@@ -548,7 +548,7 @@ void TraceGraphEvalEnter(const abstract::EvaluatorPtr &eval, const abstract::Anf
     MS_LOG(EXCEPTION) << "GraphInferEnter got null eval";
   }
   if (eval->isa<abstract::FuncGraphEvaluator>() || eval->isa<abstract::MetaFuncGraphEvaluator>()) {
-    graph_infer_stack.emplace(std::pair<abstract::EvaluatorPtr, abstract::AnfNodeConfigPtr>(eval, node));
+    (void)graph_infer_stack.emplace(std::pair<abstract::EvaluatorPtr, abstract::AnfNodeConfigPtr>(eval, node));
   }
 }
 
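The emplace call returns a value this caller never inspects; casting it to (void) records that the result is discarded on purpose and silences the corresponding unused-result warning. A minimal illustration of the same idiom with a [[nodiscard]] function (names made up for the sketch):

#include <cstdio>

// A helper whose result callers are normally expected to check.
[[nodiscard]] bool TryLog(const char *msg) { return std::fputs(msg, stderr) >= 0; }

void Example() {
  // Discarding a [[nodiscard]] result warns; the explicit cast states the intent.
  (void)TryLog("best-effort logging, failure deliberately ignored\n");
}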
@@ -31,7 +31,7 @@
 
 namespace mindspore {
 namespace abstract {
-enum State {
+enum class State {
   SAME,
   X_ONE,
   Y_ONE,
@@ -106,17 +106,17 @@ void ComputeReduceIndex(const std::vector<int64_t> &reverse_x, const std::vector
     const int64_t y_i = reverse_y[i];
     const int64_t reduce_idx = SizeToLong(n - 1 - i);
     if (x_i == y_i) {
-      curr = SAME;
+      curr = State::SAME;
     } else if (x_i == 1) {
       grad_x_reduce_idx->push_back(reduce_idx);
-      curr = X_ONE;
+      curr = State::X_ONE;
     } else if (y_i == 1) {
       grad_y_reduce_idy->push_back(reduce_idx);
-      curr = Y_ONE;
+      curr = State::Y_ONE;
     } else {
       MS_LOG(EXCEPTION) << "not compatible shape input for BroadcastGradientArgs";
     }
-    if (curr == SAME && x_i == 1) {
+    if (curr == State::SAME && x_i == 1) {
       grad_x_reduce_idx->push_back(reduce_idx);
       grad_y_reduce_idy->push_back(reduce_idx);
       continue;
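Replacing the unscoped enum with enum class is why every use site changes from SAME to State::SAME: scoped enumerators no longer leak into the enclosing namespace and no longer convert implicitly to int, which is what the warning targets. A self-contained sketch of the scoped-enum usage:

#include <cstdint>

enum class State { SAME, X_ONE, Y_ONE };

bool BothBroadcastable(State curr, int64_t x_i) {
  // Enumerators must be qualified with State::, and comparisons stay type-safe.
  return curr == State::SAME && x_i == 1;
}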
@@ -456,7 +456,8 @@ AnfNodePtr DFunctor::AttachFvDoutToTape(const AnfNodePtr &grad_fv) {
       node,
       sens,
     });
-    fv_adjoint->second->RegisterDoutUser(new_grad_fv->cast<CNodePtr>(), 3);
+    constexpr size_t sens_index = 3;
+    fv_adjoint->second->RegisterDoutUser(new_grad_fv->cast<CNodePtr>(), sens_index);
     MS_LOG(DEBUG) << "AttachFvDoutToTape add fv sens " << sens->ToString() << " to " << new_grad_fv->ToString() << " "
                   << fv->ToString() << " " << primal_graph_->ToString() << ".";
   }
@@ -478,7 +479,8 @@ AnfNodePtr DFunctor::AttachIndirectFvDoutToTape(const AnfNodePtr &grad_fv) {
       node,
       sens,
     });
-    fv_adjoint.second->RegisterDoutUser(new_grad_fv->cast<CNodePtr>(), 3);
+    constexpr size_t sens_index = 3;
+    fv_adjoint.second->RegisterDoutUser(new_grad_fv->cast<CNodePtr>(), sens_index);
     MS_LOG(DEBUG) << "AttachIndirectFvDoutToTape add indirect fv sens " << sens->ToString() << " to "
                   << new_grad_fv->ToString() << ".";
   }
@@ -511,8 +513,9 @@ void DFunctor::MapMorphism() {
     param_adjoints.push_back(param_adjoint->second);
   }
   auto tape_output = tape_->NewCNode(inputs);
+  constexpr size_t offset_num = 2;
   for (size_t i = 0; i < param_adjoints.size(); ++i) {
-    param_adjoints[i]->RegisterDoutUser(tape_output, i + 2);
+    param_adjoints[i]->RegisterDoutUser(tape_output, i + offset_num);
   }
   tape_->set_output(tape_output);
   // Set output for k_graph_, K:: cnode->forward_app.
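These three hunks share one pattern: a bare index or offset (3, 2) becomes a named constexpr local, so the magic-number check passes and the meaning of the index is documented at the use site. A reduced sketch of the before/after shape (RegisterUser is a hypothetical stand-in, not the real RegisterDoutUser):

#include <cstddef>

// Hypothetical stand-in for RegisterDoutUser(cnode, index).
void RegisterUser(std::size_t input_index) { (void)input_index; }

void Before() {
  RegisterUser(3);  // magic number: which input is 3?
}

void After() {
  constexpr std::size_t sens_index = 3;  // the sens argument sits at input 3
  RegisterUser(sens_index);
}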
@@ -312,7 +312,8 @@ void KPrim::TransformArgsForFuncGraph(const FuncGraphManagerPtr &mng, const Func
                                       std::vector<AnfNodePtr> *const transf_args) {
   MS_EXCEPTION_IF_NULL(mng);
   TransformNormalArgs(mng, bprop_fg, outer, transf_args);
-  auto bprop_fg_param_size = bprop_fg->parameters().size() - 2;
+  constexpr size_t need_filter_size = 2;
+  auto bprop_fg_param_size = bprop_fg->parameters().size() - need_filter_size;
   // current_primal_fg may have extra parameters after AutoMonad
   const auto &current_primal_fg_params = current_primal_fg->parameters();
   if (bprop_fg_param_size < current_primal_fg_params.size()) {
@@ -359,7 +360,10 @@ void KPrim::CheckBprop(const FuncGraphPtr &bprop_fg, const string &prim_to_check
 
   std::vector<AnfNodePtr> inputs;
   inputs.emplace_back(NewValueNode(prim::kPrimMakeTuple));
-  inputs.insert(inputs.begin() + 1, bprop_fg->parameters().begin(), bprop_fg->parameters().end() - 2);
+  constexpr int primitive_size = 1;
+  constexpr int brprop_offset_size = 2;
+  inputs.insert(inputs.begin() + primitive_size, bprop_fg->parameters().begin(),
+                bprop_fg->parameters().end() - brprop_offset_size);
   AnfNodePtr params = bprop_fg->NewCNode(inputs);
 
   inputs.clear();
@@ -286,11 +286,12 @@ AnfNodePtr ConvertListGetItemToTupleGetItem(const CNodePtr &node) {
 
   const auto &inputs = node->inputs();
   // Inputs should be [list_getitem, list, item]
-  const size_t expect_inputs_size = 3;
-  CheckInputsSize(inputs.size(), expect_inputs_size, GetCNodeFuncName(node));
-
-  AnfNodePtr data = inputs[1];
-  AnfNodePtr cons = inputs[2];
+  constexpr size_t expect_input_size = 3;
+  CheckInputsSize(inputs.size(), expect_input_size, GetCNodeFuncName(node));
+  constexpr size_t real_input_index = 1;
+  constexpr size_t index_input_index = 2;
+  AnfNodePtr data = inputs[real_input_index];
+  AnfNodePtr cons = inputs[index_input_index];
   MS_EXCEPTION_IF_NULL(data);
   MS_EXCEPTION_IF_NULL(cons);
 
@@ -337,9 +338,10 @@ AnfNodePtr EraseMakeKeywordArgNode(const CNodePtr &node) {
   MS_EXCEPTION_IF_NULL(node);
   const auto &inputs = node->inputs();
   // Inputs should be [make_keyword_arg, key, value]
-  const size_t expect_inputs_size = 3;
-  CheckInputsSize(inputs.size(), expect_inputs_size, GetCNodeFuncName(node));
-  return inputs[2];
+  constexpr size_t expect_input_size = 3;
+  constexpr size_t value_inputs_index = 2;
+  CheckInputsSize(inputs.size(), expect_input_size, GetCNodeFuncName(node));
+  return inputs[value_inputs_index];
 }
 
 AnfNodePtr EraseExtractKeywordArg(const CNodePtr &node) {
@@ -455,10 +457,10 @@ AnfNodePtr ConvertSparseGetAttrToTupleGetItem(const CNodePtr &node, const int64_
 
   const auto &inputs = node->inputs();
   // Inputs should be [sparse_getattr, sparse]
-  const size_t expect_inputs_size = 2;
-  CheckInputsSize(inputs.size(), expect_inputs_size, GetCNodeFuncName(node));
-
-  AnfNodePtr sparse = inputs[1];
+  constexpr size_t expect_input_index = 2;
+  CheckInputsSize(inputs.size(), expect_input_index, GetCNodeFuncName(node));
+  constexpr size_t sparse_index = 1;
+  AnfNodePtr sparse = inputs[sparse_index];
   MS_EXCEPTION_IF_NULL(sparse);
   auto cons_node = NewValueNode(index);
   AbstractBasePtr aptr = std::make_shared<AbstractScalar>(std::make_shared<Int64Imm>(index));
@@ -492,13 +494,16 @@ bool CleanAfterOptA(const FuncGraphPtr &root, const FuncGraphManagerPtr &manager
       new_node = ConvertMakeSparseToMakeTuple(cnode);
     } else if (IsPrimitiveCNode(node, prim::kPrimSparseTensorGetIndices) ||
                IsPrimitiveCNode(node, prim::kPrimRowTensorGetIndices)) {
-      new_node = ConvertSparseGetAttrToTupleGetItem(cnode, 0);
+      constexpr int64_t indices_index = 0;
+      new_node = ConvertSparseGetAttrToTupleGetItem(cnode, indices_index);
     } else if (IsPrimitiveCNode(node, prim::kPrimSparseTensorGetValues) ||
               IsPrimitiveCNode(node, prim::kPrimRowTensorGetValues)) {
-      new_node = ConvertSparseGetAttrToTupleGetItem(cnode, 1);
+      constexpr int64_t value_index = 1;
+      new_node = ConvertSparseGetAttrToTupleGetItem(cnode, value_index);
     } else if (IsPrimitiveCNode(node, prim::kPrimSparseTensorGetDenseShape) ||
               IsPrimitiveCNode(node, prim::kPrimRowTensorGetDenseShape)) {
-      new_node = ConvertSparseGetAttrToTupleGetItem(cnode, 2);
+      constexpr int64_t shape_index = 2;
+      new_node = ConvertSparseGetAttrToTupleGetItem(cnode, shape_index);
     }
 
     if (new_node != nullptr) {
@@ -30,7 +30,7 @@ namespace mindspore {
 namespace opt {
 namespace irpass {
 static AnfNodePtr GenerateUnpackGraphNode(const AnfNodePtr &origin_node, std::vector<AnfNodePtr> inputs_y,
-                                          AnfNodePtr func_node, bool is_unpack, bool sens_param) {
+                                          const AnfNodePtr &func_node, bool is_unpack, bool sens_param) {
   MS_EXCEPTION_IF_NULL(func_node);
   FuncGraphPtr func_graph = origin_node->func_graph();
   MS_EXCEPTION_IF_NULL(func_graph);
@@ -42,8 +42,8 @@ static AnfNodePtr GenerateUnpackGraphNode(const AnfNodePtr &origin_node, std::ve
     nodes.push_back(func_node);
     // {unpackcall, {GradOperation, ...}, args...}
     const size_t inputs_begin_index = 2;
-    std::transform(inputs_y.begin() + inputs_begin_index, inputs_y.end(), std::back_inserter(nodes),
-                   [](const AnfNodePtr &node) { return node; });
+    (void)std::transform(inputs_y.begin() + inputs_begin_index, inputs_y.end(), std::back_inserter(nodes),
+                         [](const AnfNodePtr &node) { return node; });
     unpack_graph_node = func_graph->NewCNodeBefore(origin_node, nodes);
   } else {
     auto unpack_graph = std::make_shared<prim::UnpackGraphPrimitive>("unpack_graph", sens_param, false);
@@ -51,8 +51,8 @@ static AnfNodePtr GenerateUnpackGraphNode(const AnfNodePtr &origin_node, std::ve
     nodes.push_back(func_node);
     // {{GradOperation, ...}, args...}
     const size_t inputs_begin_index = 1;
-    std::transform(inputs_y.begin() + inputs_begin_index, inputs_y.end(), std::back_inserter(nodes),
-                   [](const AnfNodePtr &node) { return node; });
+    (void)std::transform(inputs_y.begin() + inputs_begin_index, inputs_y.end(), std::back_inserter(nodes),
+                         [](const AnfNodePtr &node) { return node; });
     unpack_graph_node = func_graph->NewCNodeBefore(origin_node, nodes);
   }
   return unpack_graph_node;
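Two fixes appear in this file: the AnfNodePtr parameter (a shared_ptr alias) is now taken by const reference, which avoids a reference-count bump on every call, and the iterator returned by std::transform is explicitly discarded. A standalone sketch of both, with Node/NodePtr as placeholder types rather than the real MindSpore classes:

#include <algorithm>
#include <cstddef>
#include <iterator>
#include <memory>
#include <vector>

struct Node {};
using NodePtr = std::shared_ptr<Node>;  // placeholder for AnfNodePtr

// const reference: no copy, no refcount traffic, and the callee cannot reseat the pointer.
void Append(std::vector<NodePtr> *nodes, const NodePtr &node) { nodes->push_back(node); }

void CopyTail(const std::vector<NodePtr> &inputs, std::vector<NodePtr> *nodes) {
  constexpr std::size_t inputs_begin_index = 1;
  if (inputs.size() <= inputs_begin_index) return;  // nothing after the skipped prefix
  // std::transform returns the output iterator; the (void) cast marks it intentionally unused.
  (void)std::transform(inputs.begin() + inputs_begin_index, inputs.end(), std::back_inserter(*nodes),
                       [](const NodePtr &node) { return node; });
}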
@@ -292,7 +292,7 @@ bool CombineLikeGraphs(const ResourcePtr &res) {
        continue;
      }
      auto repl_n = cloned->cast<CNodePtr>();
-     repl_n->set_input(n.second, param);
+     repl_n->set_input(IntToSize(n.second), param);
    }
  }
  MS_LOG(DEBUG) << "Fg0 used_global_parameters size :" << fg->used_global_parameters().size();
@@ -793,7 +793,7 @@ void Pipeline::Run() {
 #ifdef ENABLE_DUMP_IR
     if (mindspore::RecorderManager::Instance().RdrEnable()) {
       MS_LOG(INFO) << "Recording FuncGraph in pipeline using RDR.";
-      std::string name = GetBaseNameForIR(i, action.first);
+      std::string name = GetBaseNameForIR(SizeToLong(i), action.first);
       if (graph != nullptr) {
         auto graph_clone = BasicClone(graph);
         if (graph_clone != nullptr) {
@@ -814,7 +814,7 @@ void Pipeline::Run() {
 
     if (MsContext::GetInstance()->get_param<bool>(MS_CTX_SAVE_GRAPHS_FLAG) && graph != nullptr) {
       user_graph = graph;
-      std::string base_name = GetBaseNameForIR(i, action.first);
+      std::string base_name = GetBaseNameForIR(SizeToLong(i), action.first);
 
       // generate IR file in dot format, which can be converted to svg file using graphviz dot command
       draw::Draw(base_name + ".dot", graph);
@@ -1000,7 +1000,7 @@ void ExecutorPy::UpdataParamNodeDefaultInput(const std::string &phase,
   }
 }
 
-void ExecutorPy::RunInitGraph(const py::dict &init_params, const std::string &phase) {
+void ExecutorPy::RunInitGraph(const py::dict &init_params, const std::string &phase) const {
 #ifdef ENABLE_GE
   RunGEInitGraph(init_params, phase);
 #endif
@@ -1023,7 +1023,7 @@ bool InitExecDataset(const std::string &queue_name, int64_t iter_num, int64_t ba
   auto ms_context = MsContext::GetInstance();
   MS_EXCEPTION_IF_NULL(ms_context);
   if (!context::IsTsdOpened(ms_context) || !context::IsGeInited(ms_context)) {
-    (void)InitPipeline();
+    InitPipeline();
   }
 #endif
   if (iter_num == -1) {
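GetBaseNameForIR takes a signed index while the loop counter i is a size_t; wrapping the argument in SizeToLong makes the signedness conversion explicit instead of implicit, and IntToSize in the earlier hunk is the mirror-image fix. SizeToLong and IntToSize are MindSpore's conversion helpers; a plain-standard equivalent of the pattern looks like this (the function names mirror the diff, but the bodies are invented for illustration):

#include <cstddef>
#include <cstdint>
#include <string>
#include <vector>

std::string GetBaseNameForIR(int64_t stage_idx, const std::string &action_name) {
  return std::to_string(stage_idx) + "_" + action_name;
}

void DumpStageNames(const std::vector<std::string> &actions) {
  for (std::size_t i = 0; i < actions.size(); ++i) {
    // Explicit cast instead of an implicit size_t -> int64_t conversion;
    // MindSpore routes this through SizeToLong, which may also range-check.
    const std::string name = GetBaseNameForIR(static_cast<int64_t>(i), actions[i]);
    (void)name;  // the real code would write an IR file under this name
  }
}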
@@ -89,7 +89,7 @@ class ExecutorPy : public std::enable_shared_from_this<ExecutorPy> {
                 const py::object &broadcast_params = {});
   void UpdataParamNodeDefaultInput(const std::string &phase,
                                    const std::unordered_map<std::string, tensor::TensorPtr> &params);
-  void RunInitGraph(const py::dict &init_params, const std::string &phase);
+  void RunInitGraph(const py::dict &init_params, const std::string &phase) const;
   void PyExePath(const py::object &phase);
   py::dict GetParameterLayout(const std::string &phase);
   py::dict GetCNodeStrategy(const std::string &phase);
@@ -794,7 +794,7 @@ inline void AddToManager(const AnalysisEnginePtr &engine, const FuncGraphPtr fun
   manager->AddFuncGraph(func_graph);
 }
 
-enum REQUIRE_TYPE { ATTR, METHOD };
+enum class REQUIRE_TYPE { ATTR, METHOD };
 
 EvalResultPtr StaticGetterInferred(const ValuePtr &value, const ConfigPtr &data_conf, const AnfNodeConfigPtr &old_conf,
                                    REQUIRE_TYPE require_type = REQUIRE_TYPE::METHOD) {
@@ -96,9 +96,10 @@ py::tuple check_bprop_out(const py::object &grads_obj, const py::tuple &py_args)
   } else {
     grads = py::cast<py::tuple>(grads_obj);
   }
-  if (grads.size() != py_args.size() - 2) {
+  constexpr int filter_args_size = 2;
+  if (grads.size() != py_args.size() - filter_args_size) {
     MS_EXCEPTION(TypeError) << "For user define net bprop, the gradients number: " << grads.size()
-                            << " is not equal to the args number: " << py_args.size() - 2 << ".";
+                            << " is not equal to the args number: " << (py_args.size() - filter_args_size) << ".";
   }
   if (MsContext::GetInstance()->get_param<bool>(MS_CTX_CHECK_BPROP_FLAG)) {
     for (size_t i = 0; i < grads.size(); i++) {
@@ -191,15 +191,15 @@ GraphDebugInfoPtr FuncGraph::debug_info() {
   return this->debug_info_;
 }
 
-const AnfNodeSet &FuncGraph::nodes() { return nodes_; }
+const AnfNodeSet &FuncGraph::nodes() const { return nodes_; }
 
 void FuncGraph::CopyNodes(const FuncGraphPtr &source) { nodes_ = source->nodes(); }
 
 void FuncGraph::ClearNodes() { nodes_.clear(); }
 
-void FuncGraph::AddNode(AnfNodePtr node) { nodes_.add(node); }
+void FuncGraph::AddNode(const AnfNodePtr &node) { nodes_.add(node); }
 
-void FuncGraph::DropNode(AnfNodePtr node) {
+void FuncGraph::DropNode(const AnfNodePtr &node) {
   nodes_.erase(node);
   auto graph = node->func_graph();
   // Remove the node from order list.
@@ -208,7 +208,7 @@ void FuncGraph::DropNode(AnfNodePtr node) {
   }
 }
 
-const AnfNodeCounterMap &FuncGraph::value_nodes() { return value_nodes_; }
+const AnfNodeCounterMap &FuncGraph::value_nodes() const { return value_nodes_; }
 
 void FuncGraph::CopyValueNodes(const FuncGraphPtr &source) {
   auto &others = source->value_nodes();
@@ -219,7 +219,7 @@ void FuncGraph::CopyValueNodes(const FuncGraphPtr &source) {
 
 void FuncGraph::ClearValueNodes() { value_nodes_.clear(); }
 
-void FuncGraph::AddValueNode(AnfNodePtr node, int count) {
+void FuncGraph::AddValueNode(const AnfNodePtr &node, int count) {
   if (value_nodes_.count(node) == 0) {
     value_nodes_[node] = count;
   } else {
@@ -227,7 +227,7 @@ void FuncGraph::AddValueNode(AnfNodePtr node, int count) {
   }
 }
 
-void FuncGraph::DropValueNode(AnfNodePtr node) {
+void FuncGraph::DropValueNode(const AnfNodePtr &node) {
   if (value_nodes_.count(node) != 0) {
     if (value_nodes_[node] == 1) {
       (void)value_nodes_.erase(node);
@@ -254,7 +254,7 @@ void FuncGraph::CopyFreeVariables(const FuncGraphPtr &source) {
 
 void FuncGraph::ClearFreeVariables() { free_variables_.clear(); }
 
-bool FuncGraph::AddFreeVariable(AnfNodePtr node, int count) {
+bool FuncGraph::AddFreeVariable(const AnfNodePtr &node, int count) {
   if (free_variables_.count(node) == 0) {
     free_variables_[node] = count;
     return true;
@@ -264,7 +264,7 @@ bool FuncGraph::AddFreeVariable(AnfNodePtr node, int count) {
   }
 }
 
-bool FuncGraph::DropFreeVariable(AnfNodePtr node) {
+bool FuncGraph::DropFreeVariable(const AnfNodePtr &node) {
   if (free_variables_.count(node) != 0) {
     if (free_variables_[node] == 1) {
       (void)free_variables_.erase(node);
@@ -313,7 +313,7 @@ std::vector<FuncGraphPtr> FuncGraph::free_variables_func_graphs() {
   return func_graphs;
 }
 
-const FuncGraphCounterMap &FuncGraph::func_graphs_used() { return func_graphs_used_; }
+const FuncGraphCounterMap &FuncGraph::func_graphs_used() const { return func_graphs_used_; }
 
 void FuncGraph::CopyFuncGraphsUsed(const FuncGraphPtr &source) {
   auto &others = source->func_graphs_used();
@@ -325,7 +325,7 @@ void FuncGraph::CopyFuncGraphsUsed(const FuncGraphPtr &source) {
 
 void FuncGraph::ClearFuncGraphsUsed() { func_graphs_used_.clear(); }
 
-bool FuncGraph::AddFuncGraphUsed(FuncGraphPtr fg, int count) {
+bool FuncGraph::AddFuncGraphUsed(const FuncGraphPtr &fg, int count) {
   if (func_graphs_used_.count(fg) == 0) {
     func_graphs_used_[fg] = count;
     return true;
@@ -335,7 +335,7 @@ bool FuncGraph::AddFuncGraphUsed(FuncGraphPtr fg, int count) {
   }
 }
 
-bool FuncGraph::DropFuncGraphUsed(FuncGraphPtr fg) {
+bool FuncGraph::DropFuncGraphUsed(const FuncGraphPtr &fg) {
   if (func_graphs_used_.count(fg) != 0) {
     if (func_graphs_used_[fg] == 1) {
       (void)func_graphs_used_.erase(fg);
@@ -358,7 +358,7 @@ const FuncGraphSet &FuncGraph::func_graphs_used_total() {
   return used;
 }
 
-const CNodeIndexCounterMap &FuncGraph::func_graph_cnodes_index() { return func_graph_cnodes_index_; }
+const CNodeIndexCounterMap &FuncGraph::func_graph_cnodes_index() const { return func_graph_cnodes_index_; }
 
 void FuncGraph::CopyFuncGraphCNodesIndex(const FuncGraphPtr &source) {
   auto &others = source->func_graph_cnodes_index();
@@ -374,7 +374,7 @@ void FuncGraph::CopyFuncGraphCNodesIndex(const FuncGraphPtr &source) {
 
 void FuncGraph::ClearFuncGraphCNodesIndex() { func_graph_cnodes_index_.clear(); }
 
-void FuncGraph::AddFuncGraphCNodeIndex(CNodeIndexPairPtr pair, int count) {
+void FuncGraph::AddFuncGraphCNodeIndex(const CNodeIndexPairPtr &pair, int count) {
   if (func_graph_cnodes_index_.count(pair) == 0) {
     func_graph_cnodes_index_[pair] = count;
   } else {
@@ -382,7 +382,7 @@ void FuncGraph::AddFuncGraphCNodeIndex(CNodeIndexPairPtr pair, int count) {
   }
 }
 
-void FuncGraph::DropFuncGraphCNodeIndex(CNodeIndexPairPtr pair) {
+void FuncGraph::DropFuncGraphCNodeIndex(const CNodeIndexPairPtr &pair) {
   if (func_graph_cnodes_index_.count(pair) != 0) {
     if (func_graph_cnodes_index_[pair] == 1) {
       (void)func_graph_cnodes_index_.erase(pair);
@@ -396,7 +396,7 @@ void FuncGraph::DropFuncGraphCNodeIndex(CNodeIndexPairPtr pair) {
   }
 }
 
-const std::unordered_map<AnfNodePtr, int> &FuncGraph::j_value_nodes() { return j_value_nodes_; }
+const std::unordered_map<AnfNodePtr, int> &FuncGraph::j_value_nodes() const { return j_value_nodes_; }
 
 void FuncGraph::CopyJValueNodes(const FuncGraphPtr &source) {
   auto &others = source->j_value_nodes();
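All of the FuncGraph changes above follow the same two rules: accessors that do not modify the graph are marked const, and shared_ptr-typed parameters (AnfNodePtr, FuncGraphPtr, CNodeIndexPairPtr) are passed by const reference. A reduced sketch of the resulting class shape, with Graph/Node/NodePtr as placeholders for the real types:

#include <memory>
#include <unordered_map>

struct Node {};
using NodePtr = std::shared_ptr<Node>;  // placeholder for AnfNodePtr

class Graph {
 public:
  // const accessor: usable through a const Graph&, promises not to modify members.
  const std::unordered_map<NodePtr, int> &value_nodes() const { return value_nodes_; }

  // const reference parameter: the shared_ptr is not copied on every call.
  void AddValueNode(const NodePtr &node, int count = 1) { value_nodes_[node] += count; }

 private:
  std::unordered_map<NodePtr, int> value_nodes_;
};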
@@ -257,25 +257,25 @@ class FuncGraph : public FuncGraphBase, public EffectInfoHolder {
     this->debug_info_ = info;
   }
   // Get all nodes belonging to this func graph.
-  const AnfNodeSet &nodes();
+  const AnfNodeSet &nodes() const;
   void CopyNodes(const FuncGraphPtr &source);
   void ClearNodes();
-  void AddNode(AnfNodePtr node);
-  void DropNode(AnfNodePtr node);
+  void AddNode(const AnfNodePtr &node);
+  void DropNode(const AnfNodePtr &node);
 
   // Get all value_nodes belonging to this func graph.
-  const AnfNodeCounterMap &value_nodes();
+  const AnfNodeCounterMap &value_nodes() const;
   void CopyValueNodes(const FuncGraphPtr &source);
   void ClearValueNodes();
-  void AddValueNode(AnfNodePtr node, int count = 1);
-  void DropValueNode(AnfNodePtr node);
+  void AddValueNode(const AnfNodePtr &node, int count = 1);
+  void DropValueNode(const AnfNodePtr &node);
 
   // Get all free vars directly used in this func graph.
-  const AnfNodeCounterMap &free_variables();
+  const AnfNodeCounterMap &free_variables() const;
   void CopyFreeVariables(const FuncGraphPtr &source);
   void ClearFreeVariables();
-  bool AddFreeVariable(AnfNodePtr node, int count = 1);
-  bool DropFreeVariable(AnfNodePtr node);
+  bool AddFreeVariable(const AnfNodePtr &node, int count = 1);
+  bool DropFreeVariable(const AnfNodePtr &node);
 
   // Get all vars required by this func graph.
   const BaseRefCounterMap &free_variables_total();
@@ -287,14 +287,14 @@ class FuncGraph : public FuncGraphBase, public EffectInfoHolder {
   std::vector<FuncGraphPtr> free_variables_func_graphs();
 
   // Get all value nodes of func graph directly used by this func graph.
-  const FuncGraphCounterMap &func_graphs_used();
+  const FuncGraphCounterMap &func_graphs_used() const;
   void CopyFuncGraphsUsed(const FuncGraphPtr &source);
   void ClearFuncGraphsUsed();
-  bool AddFuncGraphUsed(FuncGraphPtr fg, int count = 1);
-  bool DropFuncGraphUsed(FuncGraphPtr fg);
+  bool AddFuncGraphUsed(const FuncGraphPtr &fg, int count = 1);
+  bool DropFuncGraphUsed(const FuncGraphPtr &fg);
 
   // Get all value nodes in the inputs of J directly used by this func graph.
-  const std::unordered_map<AnfNodePtr, int> &j_value_nodes();
+  const std::unordered_map<AnfNodePtr, int> &j_value_nodes() const;
   void CopyJValueNodes(const FuncGraphPtr &source);
   void ClearJValueNodes();
   void AddJValueNode(const AnfNodePtr &value_node, int count = 1);
@@ -304,11 +304,11 @@ class FuncGraph : public FuncGraphBase, public EffectInfoHolder {
   const FuncGraphSet &func_graphs_used_total();
 
   // Get all user value nodes of this func graph, by CNode and its input's index.
-  const CNodeIndexCounterMap &func_graph_cnodes_index();
+  const CNodeIndexCounterMap &func_graph_cnodes_index() const;
   void CopyFuncGraphCNodesIndex(const FuncGraphPtr &source);
   void ClearFuncGraphCNodesIndex();
-  void AddFuncGraphCNodeIndex(CNodeIndexPairPtr node, int count = 1);
-  void DropFuncGraphCNodeIndex(CNodeIndexPairPtr node);
+  void AddFuncGraphCNodeIndex(const CNodeIndexPairPtr &node, int count = 1);
+  void DropFuncGraphCNodeIndex(const CNodeIndexPairPtr &node);
 
   // Return the parent of this graph.
   FuncGraphPtr parent();
@@ -309,7 +309,8 @@ void DumpTime::Save() {
   file_out_ << "{\n";
   file_out_ << " \"traceEvents\": [" << std::endl;
   file_ss_ >> file_out_.rdbuf();
-  (void)file_out_.seekp(-7, std::ios::end);
+  constexpr int offset = -7;
+  (void)file_out_.seekp(offset, std::ios::end);
   file_out_ << " ]" << std::endl << " ,\n";
   file_out_ << " \"displayTimeUnit\": \"ms\"" << std::endl;
   file_out_ << "}";