!47349 [AR] Optimize IR debugging capability

Merge pull request !47349 from 李良灿/save
This commit is contained in:
i-robot 2023-01-09 04:20:41 +00:00 committed by Gitee
commit 76080c8b82
41 changed files with 203 additions and 179 deletions
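
In summary, this PR turns the boolean save_graphs context flag (MS_CTX_SAVE_GRAPHS_FLAG) into an integer dump level from 0 to 3 and gates every IR dump through a new MsContext::CanDump(level) check: introductory (1) covers the commonly needed IR files, advanced (2) adds detailed per-phase graphs, and fully (3) additionally enables .dot drawings and verbose per-pass files (the separate MS_CTX_SAVE_GRAPH_DOT flag is removed). The recurring call-site change looks like this, as a schematic sketch rather than a literal hunk from the diff:

// Before: one on/off switch, plus a separate flag for .dot output.
bool save_graphs = context_ptr->get_param<bool>(MS_CTX_SAVE_GRAPHS_FLAG);
if (save_graphs) {
  DumpIR(file_name, kernel_graph);
}

// After: each dump site states how detailed its output is.
if (context_ptr->CanDump(introductory)) {  // or advanced / fully
  DumpIR(file_name, kernel_graph);
}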

View File

@ -50,8 +50,7 @@ void BackendCommonOptimization(const std::shared_ptr<session::KernelGraph> &kern
#ifdef ENABLE_DUMP_IR
auto context_ptr = MsContext::GetInstance();
MS_EXCEPTION_IF_NULL(context_ptr);
bool save_graphs = context_ptr->get_param<bool>(MS_CTX_SAVE_GRAPHS_FLAG);
if (save_graphs) {
if (context_ptr->CanDump(introductory)) {
std::string file_name = "hwopt_common_before_graph_" + std::to_string(kernel_graph->graph_id()) + ".ir";
DumpIR(file_name, kernel_graph);
}
@ -79,7 +78,7 @@ void BackendCommonOptimization(const std::shared_ptr<session::KernelGraph> &kern
kernel_graph->SetExecOrderByDefault();
PROF_END(backend_common_optimization);
#ifdef ENABLE_DUMP_IR
if (save_graphs) {
if (context_ptr->CanDump(introductory)) {
std::string file_name = "hwopt_common_after_graph_" + std::to_string(kernel_graph->graph_id()) + ".ir";
DumpIR(file_name, kernel_graph);
}
@ -115,8 +114,7 @@ void CommonFinalOptimization(const std::shared_ptr<session::KernelGraph> &kernel
// Dump IR if save_graphs is set.
auto context = MsContext::GetInstance();
MS_EXCEPTION_IF_NULL(context);
const bool save_graphs = context->get_param<bool>(MS_CTX_SAVE_GRAPHS_FLAG);
if (save_graphs) {
if (context->CanDump(advanced)) {
std::string filename = "hwopt_common_final_graph_" + std::to_string(kernel_graph->graph_id()) + ".ir";
DumpIR(filename, kernel_graph);
}
@ -129,8 +127,7 @@ void CommonUnifyMindIR(const std::shared_ptr<session::KernelGraph> &kernel_graph
#ifdef ENABLE_DUMP_IR
auto context_ptr = MsContext::GetInstance();
MS_EXCEPTION_IF_NULL(context_ptr);
bool save_graphs = context_ptr->get_param<bool>(MS_CTX_SAVE_GRAPHS_FLAG);
if (save_graphs) {
if (context_ptr->CanDump(introductory)) {
std::string file_name =
"hwopt_common_unify_mindir_before_graph_" + std::to_string(kernel_graph->graph_id()) + ".ir";
DumpIR(file_name, kernel_graph);
@ -146,7 +143,7 @@ void CommonUnifyMindIR(const std::shared_ptr<session::KernelGraph> &kernel_graph
(void)opt->Optimize(kernel_graph);
kernel_graph->SetExecOrderByDefault();
#ifdef ENABLE_DUMP_IR
if (save_graphs) {
if (context_ptr->CanDump(introductory)) {
std::string file_name = "hwopt_common_unify_mindir_after_graph_" + std::to_string(kernel_graph->graph_id()) + ".ir";
DumpIR(file_name, kernel_graph);
}
@ -167,8 +164,7 @@ void EliminateIllegalDataTypePass(const std::shared_ptr<session::KernelGraph> &k
#ifdef ENABLE_DUMP_IR
auto context_ptr = MsContext::GetInstance();
MS_EXCEPTION_IF_NULL(context_ptr);
bool save_graphs = context_ptr->get_param<bool>(MS_CTX_SAVE_GRAPHS_FLAG);
if (save_graphs) {
if (context_ptr->CanDump(advanced)) {
std::string file_name =
"hwopt_common_eliminate_illegal_data_type_before_graph_" + std::to_string(kernel_graph->graph_id()) + ".ir";
DumpIR(file_name, kernel_graph);
@ -181,7 +177,7 @@ void EliminateIllegalDataTypePass(const std::shared_ptr<session::KernelGraph> &k
(void)opt->Optimize(kernel_graph);
kernel_graph->SetExecOrderByDefault();
#ifdef ENABLE_DUMP_IR
if (save_graphs) {
if (context_ptr->CanDump(advanced)) {
std::string file_name =
"hwopt_common_eliminate_illegal_data_type_after_graph_" + std::to_string(kernel_graph->graph_id()) + ".ir";
DumpIR(file_name, kernel_graph);
@ -195,8 +191,7 @@ void DynamicShapeConvertPass(const std::shared_ptr<session::KernelGraph> &kernel
#ifdef ENABLE_DUMP_IR
auto context_ptr = MsContext::GetInstance();
MS_EXCEPTION_IF_NULL(context_ptr);
bool save_graphs = context_ptr->get_param<bool>(MS_CTX_SAVE_GRAPHS_FLAG);
if (save_graphs) {
if (context_ptr->CanDump(advanced)) {
std::string file_name =
"hwopt_d_before_dynamic_shape_convert_graph_" + std::to_string(kernel_graph->graph_id()) + ".ir";
DumpIR(file_name, kernel_graph);
@ -209,7 +204,7 @@ void DynamicShapeConvertPass(const std::shared_ptr<session::KernelGraph> &kernel
optimizer->AddPassManager(dynamic_shape_convert_pm);
(void)optimizer->Optimize(kernel_graph);
#ifdef ENABLE_DUMP_IR
if (save_graphs) {
if (context_ptr->CanDump(advanced)) {
std::string file_name =
"hwopt_d_after_dynamic_shape_convert_graph_" + std::to_string(kernel_graph->graph_id()) + ".ir";
DumpIR(file_name, kernel_graph);

View File

@ -111,9 +111,8 @@ void PassManager::DumpPassIR(const FuncGraphPtr &func_graph, const std::string &
#ifdef ENABLE_DUMP_IR
auto context_ptr = MsContext::GetInstance();
MS_EXCEPTION_IF_NULL(context_ptr);
bool save_graphs = context_ptr->get_param<bool>(MS_CTX_SAVE_GRAPHS_FLAG);
static const auto enable_dump = !GetDumpConfig().disable_backend_dump;
if (save_graphs && enable_dump) {
if (context_ptr->CanDump(fully) && enable_dump) {
std::ostringstream oss;
oss << "verbose_ir_files"
<< "/";

View File

@ -1562,7 +1562,7 @@ void SessionBasic::DumpGraphs(const std::vector<KernelGraphPtr> &graphs) const {
#ifdef ENABLE_DUMP_IR
auto context_ptr = MsContext::GetInstance();
MS_EXCEPTION_IF_NULL(context_ptr);
bool save_graphs = context_ptr->get_param<bool>(MS_CTX_SAVE_GRAPHS_FLAG);
bool save_graphs = context_ptr->CanDump(advanced);
auto &json_parser = DumpJsonParser::GetInstance();
json_parser.Parse();
if (!save_graphs && !json_parser.e2e_dump_enabled() && !json_parser.async_dump_enabled() &&

View File

@ -178,8 +178,7 @@ bool Somas::Assign(const KernelGraphPtr &graph_ptr) {
MS_EXCEPTION_IF_NULL(context_ptr);
MS_EXCEPTION_IF_NULL(graph_ptr);
#ifndef ENABLE_SECURITY
auto enable_save_graphs = context_ptr->get_param<bool>(MS_CTX_SAVE_GRAPHS_FLAG);
if (enable_save_graphs) {
if (context_ptr->CanDump(advanced)) {
std::string file_name = "somas_input_graph_" + std::to_string(graph_ptr->graph_id()) + ".ir";
DumpIR(file_name, graph_ptr, true, kWholeStack);
}
@ -198,7 +197,7 @@ bool Somas::GetEnableCacheFlag(const session::KernelGraph &graph) const {
std::pair<bool, std::string> Somas::GetDebugConfig() const {
auto context_ptr = MsContext::GetInstance();
MS_EXCEPTION_IF_NULL(context_ptr);
auto enable_save_graphs = context_ptr->get_param<bool>(MS_CTX_SAVE_GRAPHS_FLAG);
auto enable_save_graphs = context_ptr->CanDump(advanced);
auto save_graphs_path = context_ptr->get_param<std::string>(MS_CTX_SAVE_GRAPHS_PATH);
if (save_graphs_path.empty()) {
save_graphs_path = ".";

View File

@ -196,8 +196,7 @@ void SomasSolverPre::Log(const session::KernelGraph &graph, const TensorsDescMap
const vector<vector<size_t>> &continuous_v) const {
auto context_ptr = MsContext::GetInstance();
MS_EXCEPTION_IF_NULL(context_ptr);
bool save_graphs = context_ptr->get_param<bool>(MS_CTX_SAVE_GRAPHS_FLAG);
if (!save_graphs) {
if (!context_ptr->CanDump(advanced)) {
return;
}
SolverInputLog(graph, tensors, continuous_v);

View File

@ -201,9 +201,9 @@ AnfNodePtr TryExpandCNode(const AnfNodePtr &node, const std::function<bool(const
}
}
#ifdef ENABLE_DUMP_IR
auto ms_context = MsContext::GetInstance();
MS_EXCEPTION_IF_NULL(ms_context);
if (ms_context->get_param<bool>(MS_CTX_SAVE_GRAPHS_FLAG)) {
auto context = MsContext::GetInstance();
MS_EXCEPTION_IF_NULL(context);
if (context->CanDump(fully)) {
DumpIR("verbose_ir_files/expand_" + GetCNodeFuncName(node->cast<CNodePtr>()) + ".ir", expand_fg);
}
#endif

View File

@ -46,14 +46,13 @@ FuncGraphVector PartialEliminateMulti(const pipeline::ResourceBasePtr &resource,
if (new_res == nullptr) {
MS_LOG(EXCEPTION) << "Parameter resources is not a pipeline::Resource";
}
#ifdef ENABLE_DUMP_IR
bool save_graphs_flag = MsContext::GetInstance()->get_param<bool>(MS_CTX_SAVE_GRAPHS_FLAG);
#endif
FuncGraphVector opt_fgs;
for (const auto &func_graph : func_graphs) {
auto opt_fg = PartialEliminateOptPass(new_res, func_graph);
#ifdef ENABLE_DUMP_IR
if (save_graphs_flag) {
auto context = MsContext::GetInstance();
MS_EXCEPTION_IF_NULL(context);
if (context->CanDump(advanced)) {
DumpIR("after_opt_" + opt_fg->ToString() + ".ir", opt_fg);
}
#endif
@ -64,14 +63,16 @@ FuncGraphVector PartialEliminateMulti(const pipeline::ResourceBasePtr &resource,
FuncGraphPtr LiftFv(const pipeline::ResourceBasePtr &resource, const FuncGraphPtr &func_graph) {
#ifdef ENABLE_DUMP_IR
bool save_graphs_flag = MsContext::GetInstance()->get_param<bool>(MS_CTX_SAVE_GRAPHS_FLAG);
if (save_graphs_flag) {
auto context = MsContext::GetInstance();
MS_EXCEPTION_IF_NULL(context);
bool enable_save_graphs = context->CanDump(advanced);
if (enable_save_graphs) {
DumpIR("before_lift_" + func_graph->ToString() + ".ir", func_graph);
}
#endif
FuncGraphPtr new_fg = LiftingClone(func_graph);
#ifdef ENABLE_DUMP_IR
if (save_graphs_flag) {
if (enable_save_graphs) {
DumpIR("after_lift_" + new_fg->ToString() + ".ir", new_fg);
}
#endif
@ -81,7 +82,7 @@ FuncGraphPtr LiftFv(const pipeline::ResourceBasePtr &resource, const FuncGraphPt
}
auto opt_fg = PartialEliminateOptPass(new_res, new_fg);
#ifdef ENABLE_DUMP_IR
if (save_graphs_flag) {
if (enable_save_graphs) {
DumpIR("after_opt_" + opt_fg->ToString() + ".ir", opt_fg);
}
#endif
@ -90,8 +91,9 @@ FuncGraphPtr LiftFv(const pipeline::ResourceBasePtr &resource, const FuncGraphPt
FuncGraphVector LiftFvMulti(const pipeline::ResourceBasePtr &resource, const FuncGraphVector &func_graphs) {
#ifdef ENABLE_DUMP_IR
bool save_graphs_flag = MsContext::GetInstance()->get_param<bool>(MS_CTX_SAVE_GRAPHS_FLAG);
if (save_graphs_flag) {
auto context = MsContext::GetInstance();
MS_EXCEPTION_IF_NULL(context);
if (context->CanDump(advanced)) {
for (const auto &func_graph : func_graphs) {
DumpIR("before_lift_" + func_graph->ToString() + ".ir", func_graph);
}
@ -106,7 +108,7 @@ FuncGraphVector LiftFvMulti(const pipeline::ResourceBasePtr &resource, const Fun
}
FuncGraphVector new_fgs = LiftingCloneMulti(func_graphs);
#ifdef ENABLE_DUMP_IR
if (save_graphs_flag) {
if (context->CanDump(advanced)) {
for (const auto &new_fg : new_fgs) {
DumpIR("after_lift_" + new_fg->ToString() + ".ir", new_fg);
}

View File

@ -134,9 +134,9 @@ AnfNodePtr TryExpandCNodeFE(const AnfNodePtr &node) {
return nullptr;
}
#ifdef ENABLE_DUMP_IR
auto ms_context = MsContext::GetInstance();
MS_EXCEPTION_IF_NULL(ms_context);
if (ms_context->get_param<bool>(MS_CTX_SAVE_GRAPHS_FLAG)) {
auto context = MsContext::GetInstance();
MS_EXCEPTION_IF_NULL(context);
if (context->CanDump(advanced)) {
DumpIR("expand_fe_" + GetCNodeFuncName(node->cast<CNodePtr>()) + ".ir", expand_fg);
}
#endif

View File

@ -303,7 +303,9 @@ bool SubstitutionList::ApplySubstitutionsToIR(const OptimizerPtr &optimizer, con
loop = loop || change;
#ifdef ENABLE_DUMP_IR
static const auto enable_dump_pass_ir = GetDumpConfig().enable_dump_pass_ir;
if (enable_dump_pass_ir && MsContext::GetInstance()->get_param<bool>(MS_CTX_SAVE_GRAPHS_FLAG)) {
auto context = MsContext::GetInstance();
MS_EXCEPTION_IF_NULL(context);
if ((enable_dump_pass_ir && context->CanDump(introductory)) || context->CanDump(fully)) {
auto fg_name = optimizer->name() + "_r" + std::to_string(optimizer->CurPass_.counter) + "_" +
optimizer->CurPass_.name + "_" + substitution->name_;
static const auto switch_order = (common::GetEnv("MS_DEV_SAVE_GRAPHS_SORT_MODE") == "1");
@ -312,7 +314,7 @@ bool SubstitutionList::ApplySubstitutionsToIR(const OptimizerPtr &optimizer, con
} else {
DumpIR(fg_name + ".ir", func_graph);
}
if (MsContext::GetInstance()->get_param<bool>(MS_CTX_SAVE_GRAPH_DOT)) {
if (context->CanDump(fully)) {
draw::Draw(fg_name + ".dot", func_graph);
}
}
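
The gate introduced in this hunk (and repeated in the optimizer hunk below) combines the existing enable_dump_pass_ir opt-in with the new levels. A minimal sketch of the resulting behavior, assuming the level enum defined later in this diff; ShouldDumpPassIR is a hypothetical helper, not a function from the PR:

enum level { introductory = 1, advanced = 2, fully = 3 };

// Per-substitution IR is written when the dump config opts in to pass IR and
// the level is at least introductory, or unconditionally at level fully
// (which also replaces the old MS_CTX_SAVE_GRAPH_DOT check for .dot output).
bool ShouldDumpPassIR(bool enable_dump_pass_ir, int save_graphs) {
  return (enable_dump_pass_ir && save_graphs >= introductory) || save_graphs >= fully;
}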

View File

@ -203,7 +203,9 @@ class Optimizer : public std::enable_shared_from_this<Optimizer> {
use_profile ? (WITH(MsProfile::GetProfile()->Step(pass_names_[i])) opt_func) : opt_func();
#ifdef ENABLE_DUMP_IR
static const auto enable_dump_pass_ir = GetDumpConfig().enable_dump_pass_ir;
if (enable_dump_pass_ir && MsContext::GetInstance()->get_param<bool>(MS_CTX_SAVE_GRAPHS_FLAG)) {
auto context = MsContext::GetInstance();
MS_EXCEPTION_IF_NULL(context);
if ((enable_dump_pass_ir && context->CanDump(introductory)) || context->CanDump(fully)) {
auto fg_name =
"opt_substep_" + name_ + "_r" + std::to_string(counter) + "_" + std::to_string(i) + "_" + pass_names_[i];
MS_LOG(DEBUG) << "The opt " << name_ << " round " << counter << " OptPass " << pass_names_[i] << " end.";
@ -213,7 +215,7 @@ class Optimizer : public std::enable_shared_from_this<Optimizer> {
} else {
DumpIR(fg_name + ".ir", func_graph);
}
if (MsContext::GetInstance()->get_param<bool>(MS_CTX_SAVE_GRAPH_DOT)) {
if (context->CanDump(fully)) {
draw::Draw(fg_name + ".dot", func_graph);
}
MS_LOG(DEBUG) << "Dump " << pass_names_[i] << " func graph.";

View File

@ -48,14 +48,16 @@ std::vector<PrimitivePtr> FindPrimtive(const FuncGraphPtr &graph, const std::str
void DumpGraph(const FuncGraphPtr &root, const std::string &name) {
#ifdef ENABLE_DUMP_IR
if (MsContext::GetInstance()->get_param<bool>(MS_CTX_SAVE_GRAPHS_FLAG)) {
auto context = MsContext::GetInstance();
MS_EXCEPTION_IF_NULL(context);
if (context->CanDump(advanced)) {
static const auto switch_order = (common::GetEnv("MS_DEV_SAVE_GRAPHS_SORT_MODE") == "1");
if (switch_order) {
ExportIR(name + ".ir", root);
} else {
DumpIR(name + ".ir", root);
}
if (MsContext::GetInstance()->get_param<bool>(MS_CTX_SAVE_GRAPH_DOT)) {
if (context->CanDump(fully)) {
draw::Draw(name + ".dot", root);
}
}

View File

@ -98,7 +98,9 @@ bool StepAutoParallel(const FuncGraphPtr &root, const opt::OptimizerPtr &) {
MSLogTime msTime;
msTime.Start();
#ifdef ENABLE_DUMP_IR
if (MsContext::GetInstance()->get_param<bool>(MS_CTX_SAVE_GRAPHS_FLAG)) {
auto context = MsContext::GetInstance();
MS_EXCEPTION_IF_NULL(context);
if (context->CanDump(advanced)) {
DumpGraph(root, std::string(STEP_AUTO_PARALLEL_BEGIN));
}
#endif

View File

@ -792,7 +792,9 @@ bool OptimizeAction(const ResourcePtr &resource, const std::vector<PassItem> &pa
MS_LOG(EXCEPTION) << "Pass running to end, failed in pass:" << pass.first;
}
#ifdef ENABLE_DUMP_IR
if (MsContext::GetInstance()->get_param<bool>(MS_CTX_SAVE_GRAPHS_FLAG) && resource->func_graph() != nullptr) {
auto context = MsContext::GetInstance();
MS_EXCEPTION_IF_NULL(context);
if (context->CanDump(advanced) && resource->func_graph() != nullptr) {
auto fg_name = "opt_pass_" + std::to_string(counter) + "_" + pass.first;
auto func_graph = resource->func_graph();
MS_EXCEPTION_IF_NULL(func_graph);
@ -802,7 +804,7 @@ bool OptimizeAction(const ResourcePtr &resource, const std::vector<PassItem> &pa
} else {
DumpIR(fg_name + ".ir", func_graph);
}
if (MsContext::GetInstance()->get_param<bool>(MS_CTX_SAVE_GRAPH_DOT)) {
if (context->CanDump(fully)) {
draw::Draw(fg_name + ".dot", func_graph);
}
MS_LOG(DEBUG) << "Dump " << fg_name << " func graph.";

View File

@ -272,7 +272,9 @@ FuncGraphPtr CompileCacheManager::GetCachedFuncGraph(const FuncGraphManagerPtr &
break;
}
}
if (MsContext::GetInstance()->get_param<bool>(MS_CTX_SAVE_GRAPHS_FLAG)) {
auto context = MsContext::GetInstance();
MS_EXCEPTION_IF_NULL(context);
if (context->CanDump(advanced)) {
DumpIR("cache_loaded_graph_" + std::to_string(compile_cache_id_) + ".ir", fg);
}
return fg;

View File

@ -1093,7 +1093,9 @@ void RDRRecordGraph(const size_t action_index, const size_t action_size, const s
#ifdef ENABLE_DUMP_IR
void RecordIR(const size_t action_index, const size_t action_size, const std::string &action_name,
const FuncGraphPtr &graph, FuncGraphPtr *user_graph) {
if (MsContext::GetInstance()->get_param<bool>(MS_CTX_SAVE_GRAPHS_FLAG) && graph != nullptr) {
auto context = MsContext::GetInstance();
MS_EXCEPTION_IF_NULL(context);
if (context->CanDump(introductory) && graph != nullptr) {
*user_graph = graph;
std::string base_name = GetBaseNameForIR(SizeToLong(action_index), action_name);
@ -1108,7 +1110,7 @@ void RecordIR(const size_t action_index, const size_t action_size, const std::st
DumpIR(base_name + ".ir", graph, false, kTopStack);
}
}
if (MsContext::GetInstance()->get_param<bool>(MS_CTX_SAVE_GRAPH_DOT)) {
if (context->CanDump(fully)) {
draw::Draw(base_name + ".dot", graph);
}
}
@ -1119,7 +1121,9 @@ void RecordIR(const size_t action_index, const size_t action_size, const std::st
void SaveGraphForReadability(const std::string &action_name, const FuncGraphPtr &graph, const ResourcePtr &resource) {
if (graph != nullptr && action_name.find("optimize") != string::npos) {
#ifdef ENABLE_DUMP_IR
if (MsContext::GetInstance()->get_param<bool>(MS_CTX_SAVE_GRAPHS_FLAG)) {
auto context = MsContext::GetInstance();
MS_EXCEPTION_IF_NULL(context);
if (context->CanDump(advanced)) {
DumpIRProto(graph, action_name);
}
#endif
@ -1190,8 +1194,10 @@ void Pipeline::Run() {
#endif
#ifdef ENABLE_DUMP_IR
if (MsContext::GetInstance()->get_param<bool>(MS_CTX_SAVE_GRAPHS_FLAG) && (user_graph != nullptr)) {
if (MsContext::GetInstance()->get_param<bool>(MS_CTX_SAVE_GRAPH_DOT)) {
auto context = MsContext::GetInstance();
MS_EXCEPTION_IF_NULL(context);
if (context->CanDump(advanced) && (user_graph != nullptr)) {
if (context->CanDump(fully)) {
draw::DrawUserFuncGraph("ModelDigraph.dot", user_graph);
}
}
@ -1677,10 +1683,9 @@ FuncGraphPtr LoadMindIR(const std::string &file_name, const char *dec_key, const
func_graph = mindir_loader.LoadMindIR(file_name);
}
#ifdef ENABLE_DUMP_IR
auto context_ptr = MsContext::GetInstance();
MS_EXCEPTION_IF_NULL(context_ptr);
bool save_graphs = context_ptr->get_param<bool>(MS_CTX_SAVE_GRAPHS_FLAG);
if (save_graphs) {
auto context = MsContext::GetInstance();
MS_EXCEPTION_IF_NULL(context);
if (context->CanDump(advanced)) {
DumpIR("load.ir", func_graph);
}
#endif

View File

@ -652,7 +652,9 @@ void GradExecutor::EndGraphImpl(const InputArgsInfoPtr &input_args_info) {
if (is_top_cell_end) {
auto output_node = GetInput(input_args_info->out_value, out_id);
input_args_info->use_dynamic_shape_process |= CheckGraphDynamic(output_node);
if (MsContext::GetInstance()->get_param<bool>(MS_CTX_SAVE_GRAPHS_FLAG)) {
auto context = MsContext::GetInstance();
MS_EXCEPTION_IF_NULL(context);
if (context->CanDump(advanced)) {
curr_g()->set_output(output_node);
PyNativeAlgo::Common::DumpGraphIR("fg.ir", curr_g());
}

View File

@ -168,10 +168,10 @@ std::shared_ptr<PyNativeExecutor> Common::GetPyNativeExecutor() {
}
void Common::DumpGraphIR(const std::string &filename, const FuncGraphPtr &graph) {
#ifdef ENABLE_DUMP_IR
const auto &ms_context = MsContext::GetInstance();
MS_EXCEPTION_IF_NULL(ms_context);
if (ms_context->get_param<bool>(MS_CTX_SAVE_GRAPHS_FLAG)) {
#ifdef ENABLE_DUMP_IR
auto context = MsContext::GetInstance();
MS_EXCEPTION_IF_NULL(context);
if (context->CanDump(introductory)) {
DumpIR(filename, graph);
}
#endif

View File

@ -61,10 +61,9 @@ bool TaskGenerator::GenTasks(const std::vector<CNodePtr> &anf_node_list, std::ve
#ifdef ENABLE_DUMP_IR
string task_info_name = "task_info_graph." + std::to_string(graph_id);
(void)mindspore::RDR::RecordTaskDebugInfo(SUBMODULE_ID, task_info_name, task_debug_info_list_);
auto context_ptr = MsContext::GetInstance();
MS_EXCEPTION_IF_NULL(context_ptr);
bool save_graphs = context_ptr->get_param<bool>(MS_CTX_SAVE_GRAPHS_FLAG);
if (save_graphs) {
auto context = MsContext::GetInstance();
MS_EXCEPTION_IF_NULL(context);
if (context->CanDump(advanced)) {
#ifndef ENABLE_SECURITY
std::string file_path = GetSaveGraphsPathName("task_info_graph_" + std::to_string(graph_id) + ".ir");
DumpTaskInfo(file_path);

View File

@ -61,9 +61,9 @@ constexpr size_t kFirstIndex = 1;
#ifdef ENABLE_DUMP_IR
bool IsSaveGraph() {
auto context_ptr = MsContext::GetInstance();
MS_EXCEPTION_IF_NULL(context_ptr);
return context_ptr->get_param<bool>(MS_CTX_SAVE_GRAPHS_FLAG);
auto context = MsContext::GetInstance();
MS_EXCEPTION_IF_NULL(context);
return context->CanDump(advanced);
}
void DumpAllGraphs(NotNull<KernelGraphPtr> kg, std::set<KernelGraphPtr> *memo) {

View File

@ -298,10 +298,9 @@ void AscendGraphOptimization::OptimizeExecutionOrder(const KernelGraphPtr &graph
RootGraphExecutorValidate(NOT_NULL(graph));
#ifdef ENABLE_DUMP_IR
auto context_ptr = MsContext::GetInstance();
MS_EXCEPTION_IF_NULL(context_ptr);
bool save_graphs = context_ptr->get_param<bool>(MS_CTX_SAVE_GRAPHS_FLAG);
if (save_graphs) {
auto context = MsContext::GetInstance();
MS_EXCEPTION_IF_NULL(context);
if (context->CanDump(advanced)) {
DumpIRProto(graph, "before_removeNop_" + std::to_string(graph->graph_id()));
}
#endif
@ -319,7 +318,7 @@ void AscendGraphOptimization::OptimizeExecutionOrder(const KernelGraphPtr &graph
device::KernelAdjust::GetInstance().InsertOverflowCheckOperations(NOT_NULL(graph));
#ifdef ENABLE_DUMP_IR
if (save_graphs) {
if (context->CanDump(advanced)) {
DumpIR("after_adjust_kernel.ir", graph);
}
#endif
@ -359,10 +358,9 @@ void AscendGraphOptimization::CommOpReuse(const KernelGraphPtr &graph) const {
MS_LOG(INFO) << "Status record: end comm op reuse. graph id: " << graph->graph_id();
#ifdef ENABLE_DUMP_IR
auto context_ptr = MsContext::GetInstance();
MS_EXCEPTION_IF_NULL(context_ptr);
bool save_graphs = context_ptr->get_param<bool>(MS_CTX_SAVE_GRAPHS_FLAG);
if (save_graphs) {
auto context = MsContext::GetInstance();
MS_EXCEPTION_IF_NULL(context);
if (context->CanDump(advanced)) {
std::string file_name = "hwopt_comm_reuse_after_graph_" + std::to_string(graph->graph_id()) + ".ir";
DumpIR(file_name, graph);
}
@ -442,10 +440,9 @@ void AscendGraphOptimization::RecurseSelectKernelInfo(const KernelGraphPtr &grap
}
}
#ifdef ENABLE_DUMP_IR
auto context_ptr = MsContext::GetInstance();
MS_EXCEPTION_IF_NULL(context_ptr);
bool save_graphs = context_ptr->get_param<bool>(MS_CTX_SAVE_GRAPHS_FLAG);
if (save_graphs) {
auto context = MsContext::GetInstance();
MS_EXCEPTION_IF_NULL(context);
if (context->CanDump(introductory)) {
std::string file_name = "select_kernel_before_graph_" + std::to_string(graph->graph_id()) + ".ir";
DumpIR(file_name, graph, true, kTopStack);
}
@ -454,7 +451,7 @@ void AscendGraphOptimization::RecurseSelectKernelInfo(const KernelGraphPtr &grap
SetOperatorInfo(graph);
MS_LOG(INFO) << "Status record: end select kernel info. graph id: " << graph->graph_id();
#ifdef ENABLE_DUMP_IR
if (save_graphs) {
if (context->CanDump(introductory)) {
std::string file_name = "select_kernel_after_graph_" + std::to_string(graph->graph_id()) + ".ir";
DumpIR(file_name, graph);
}

View File

@ -446,10 +446,9 @@ GraphId AscendSession::CompileGraphImpl(NotNull<FuncGraphPtr> func_graph) {
RootGraphExecutorValidate(NOT_NULL(root_graph), all_graphs);
#ifdef ENABLE_DUMP_IR
// Dump graph before removing nop nodes
auto context_ptr = MsContext::GetInstance();
MS_EXCEPTION_IF_NULL(context_ptr);
bool save_graphs = context_ptr->get_param<bool>(MS_CTX_SAVE_GRAPHS_FLAG);
if (save_graphs) {
auto context = MsContext::GetInstance();
MS_EXCEPTION_IF_NULL(context);
if (context->CanDump(advanced)) {
DumpIRProto(root_graph, "before_removeNop_" + std::to_string(graph_sum_));
}
#endif
@ -570,10 +569,9 @@ void AscendSession::CompileChildGraph(const KernelGraphPtr &child_graph) const {
opt::AscendBackendIRFusionOptimization(child_graph);
child_graph->SetExecOrderByDefault();
#ifdef ENABLE_DUMP_IR
auto context_ptr = MsContext::GetInstance();
MS_EXCEPTION_IF_NULL(context_ptr);
bool save_graphs = context_ptr->get_param<bool>(MS_CTX_SAVE_GRAPHS_FLAG);
if (save_graphs) {
auto context = MsContext::GetInstance();
MS_EXCEPTION_IF_NULL(context);
if (context->CanDump(introductory)) {
std::string file_name = "select_kernel_before_graph_" + std::to_string(child_graph->graph_id()) + ".ir";
DumpIR(file_name, child_graph);
}
@ -581,7 +579,7 @@ void AscendSession::CompileChildGraph(const KernelGraphPtr &child_graph) const {
// select kernel build info
SelectKernel(child_graph);
#ifdef ENABLE_DUMP_IR
if (save_graphs) {
if (context->CanDump(introductory)) {
std::string file_name = "select_kernel_after_graph_" + std::to_string(child_graph->graph_id()) + ".ir";
DumpIR(file_name, child_graph);
}
@ -914,10 +912,9 @@ void AscendSession::AdjustKernel(const std::shared_ptr<KernelGraph> &kernel_grap
device::KernelAdjust::GetInstance().InsertDeviceLoopCtrl(kernel_graph);
device::KernelAdjust::GetInstance().ProcessLoopSink(kernel_graph);
#ifdef ENABLE_DUMP_IR
auto context_ptr = MsContext::GetInstance();
MS_EXCEPTION_IF_NULL(context_ptr);
bool save_graphs = context_ptr->get_param<bool>(MS_CTX_SAVE_GRAPHS_FLAG);
if (save_graphs) {
auto context = MsContext::GetInstance();
MS_EXCEPTION_IF_NULL(context);
if (context->CanDump(advanced)) {
DumpIR("after_adjust_kernel.ir", kernel_graph);
}
#endif
@ -1385,10 +1382,9 @@ void AscendSession::RecurseSelectKernelInfo(const KernelGraphPtr &graph, std::se
MS_LOG(INFO) << "Finish selecting kernel info in graph: " << graph->graph_id();
#ifdef ENABLE_DUMP_IR
auto context_ptr = MsContext::GetInstance();
MS_EXCEPTION_IF_NULL(context_ptr);
bool save_graphs = context_ptr->get_param<bool>(MS_CTX_SAVE_GRAPHS_FLAG);
if (save_graphs) {
auto context = MsContext::GetInstance();
MS_EXCEPTION_IF_NULL(context);
if (context->CanDump(introductory)) {
std::string file_name = "select_kernel_after_graph_" + std::to_string(graph->graph_id()) + ".ir";
DumpIR(file_name, graph);
}

View File

@ -742,8 +742,10 @@ FuncGraphPtr GeGraphExecutor::BuildDFGraph(const FuncGraphPtr &anf_graph,
const transform::TensorOrderMap &init_inputs_map, bool export_air) {
MS_EXCEPTION_IF_NULL(anf_graph);
#ifdef ENABLE_DUMP_IR
if (MsContext::GetInstance()->get_param<bool>(MS_CTX_SAVE_GRAPHS_FLAG)) {
if (MsContext::GetInstance()->get_param<bool>(MS_CTX_SAVE_GRAPH_DOT)) {
auto context = MsContext::GetInstance();
MS_EXCEPTION_IF_NULL(context);
if (context->CanDump(advanced)) {
if (context->CanDump(fully)) {
draw::Draw("anf_graph.dot", anf_graph); // for debug
}
DumpIR("anf_graph.ir", anf_graph, true);

View File

@ -355,8 +355,7 @@ void AscendBackendIRFusionOptimization(const std::shared_ptr<session::KernelGrap
auto context_ptr = MsContext::GetInstance();
MS_EXCEPTION_IF_NULL(context_ptr);
#ifdef ENABLE_DUMP_IR
bool save_graphs = context_ptr->get_param<bool>(MS_CTX_SAVE_GRAPHS_FLAG);
if (save_graphs) {
if (context_ptr->CanDump(introductory)) {
std::string file_name = "hwopt_d_ir_fusion_before_graph_" + std::to_string(kernel_graph->graph_id()) + ".ir";
DumpIR(file_name, kernel_graph);
DumpIRProto(kernel_graph, "before_hwopt_" + std::to_string(kernel_graph->graph_id()));
@ -403,7 +402,7 @@ void AscendBackendIRFusionOptimization(const std::shared_ptr<session::KernelGrap
(void)optimizer->Optimize(kernel_graph);
kernel_graph->SetExecOrderByDefault();
#ifdef ENABLE_DUMP_IR
if (save_graphs) {
if (context_ptr->CanDump(introductory)) {
std::string file_name = "hwopt_d_ir_fusion_after_graph_" + std::to_string(kernel_graph->graph_id()) + ".ir";
DumpIR(file_name, kernel_graph);
}
@ -421,8 +420,7 @@ void RunOpAscendBackendIRFusionOptimization(const std::shared_ptr<session::Kerne
return;
}
#ifdef ENABLE_DUMP_IR
bool save_graphs = context_ptr->get_param<bool>(MS_CTX_SAVE_GRAPHS_FLAG);
if (save_graphs) {
if (context_ptr->CanDump(introductory)) {
DumpIR("hwopt_d_ir_fusion_before.ir", kernel_graph);
}
#endif
@ -492,7 +490,7 @@ void RunOpAscendBackendIRFusionOptimization(const std::shared_ptr<session::Kerne
(void)optimizer->Optimize(kernel_graph);
kernel_graph->SetExecOrderByDefault();
#ifdef ENABLE_DUMP_IR
if (save_graphs) {
if (context_ptr->CanDump(introductory)) {
DumpIR("hwopt_d_ir_fusion_after.ir", kernel_graph);
}
#endif
@ -531,8 +529,7 @@ void AscendBackendOptimization(const std::shared_ptr<session::KernelGraph> &kern
auto context_ptr = MsContext::GetInstance();
MS_EXCEPTION_IF_NULL(context_ptr);
#ifdef ENABLE_DUMP_IR
bool save_graphs = context_ptr->get_param<bool>(MS_CTX_SAVE_GRAPHS_FLAG);
if (save_graphs) {
if (context_ptr->CanDump(introductory)) {
std::string file_name = "hwopt_d_before_graph_" + std::to_string(kernel_graph->graph_id()) + ".ir";
DumpIR(file_name, kernel_graph);
}
@ -581,11 +578,11 @@ void AscendBackendOptimization(const std::shared_ptr<session::KernelGraph> &kern
(void)optimizer2->Optimize(kernel_graph);
kernel_graph->SetExecOrderByDefault();
#ifdef ENABLE_DUMP_IR
if (save_graphs) {
if (context_ptr->CanDump(advanced)) {
std::string file_name = "hwopt_d_end_graph_" + std::to_string(kernel_graph->graph_id()) + ".ir";
DumpIR(file_name, kernel_graph, true, kWholeStack);
DumpIRProto(kernel_graph, "after_hwopt_" + std::to_string(kernel_graph->graph_id()));
if (MsContext::GetInstance()->get_param<bool>(MS_CTX_SAVE_GRAPH_DOT)) {
if (context_ptr->CanDump(fully)) {
draw::Draw("hwopt_d_end.dot", kernel_graph);
}
}
@ -613,8 +610,7 @@ void AscendBackendUBFusionOptimization(const std::shared_ptr<session::KernelGrap
build_manager.TbePreBuild(kernel_graph);
}
#ifdef ENABLE_DUMP_IR
bool save_graphs = context_ptr->get_param<bool>(MS_CTX_SAVE_GRAPHS_FLAG);
if (save_graphs) {
if (context_ptr->CanDump(advanced)) {
std::string file_name = "hwopt_d_ub_fusion_before_graph_" + std::to_string(kernel_graph->graph_id()) + ".ir";
DumpIR(file_name, kernel_graph);
}
@ -647,7 +643,7 @@ void AscendBackendUBFusionOptimization(const std::shared_ptr<session::KernelGrap
(void)optimizer->Optimize(kernel_graph);
kernel_graph->SetExecOrderByDefault();
#ifdef ENABLE_DUMP_IR
if (save_graphs) {
if (context_ptr->CanDump(advanced)) {
std::string file_name = "hwopt_d_ub_fusion_after_graph_" + std::to_string(kernel_graph->graph_id()) + ".ir";
DumpIR(file_name, kernel_graph);
}
@ -659,8 +655,7 @@ void AscendUnifyMindIR(const std::shared_ptr<session::KernelGraph> &kernel_graph
auto context_ptr = MsContext::GetInstance();
MS_EXCEPTION_IF_NULL(context_ptr);
#ifdef ENABLE_DUMP_IR
bool save_graphs = context_ptr->get_param<bool>(MS_CTX_SAVE_GRAPHS_FLAG);
if (save_graphs) {
if (context_ptr->CanDump(introductory)) {
std::string file_name = "hwopt_d_before_unify_mindir_graph_" + std::to_string(kernel_graph->graph_id()) + ".ir";
DumpIR(file_name, kernel_graph);
DumpIRProto(kernel_graph, "before_unify_mindir_hwopt_" + std::to_string(kernel_graph->graph_id()));
@ -710,7 +705,7 @@ void AscendUnifyMindIR(const std::shared_ptr<session::KernelGraph> &kernel_graph
(void)optimizer->Optimize(kernel_graph);
kernel_graph->SetExecOrderByDefault();
#ifdef ENABLE_DUMP_IR
if (save_graphs) {
if (context_ptr->CanDump(introductory)) {
std::string file_name = "hwopt_d_after_unify_mindir_graph_" + std::to_string(kernel_graph->graph_id()) + ".ir";
DumpIR(file_name, kernel_graph);
}
@ -722,8 +717,7 @@ void AscendOpAdaptation(const std::shared_ptr<session::KernelGraph> &kernel_grap
auto context_ptr = MsContext::GetInstance();
MS_EXCEPTION_IF_NULL(context_ptr);
#ifdef ENABLE_DUMP_IR
bool save_graphs = context_ptr->get_param<bool>(MS_CTX_SAVE_GRAPHS_FLAG);
if (save_graphs) {
if (context_ptr->CanDump(introductory)) {
std::string file_name = "hwopt_d_before_op_adaptation_graph_" + std::to_string(kernel_graph->graph_id()) + ".ir";
DumpIR(file_name, kernel_graph, true, kWholeStack);
DumpIRProto(kernel_graph, "before_op_adaptation_hwopt_" + std::to_string(kernel_graph->graph_id()));
@ -737,7 +731,7 @@ void AscendOpAdaptation(const std::shared_ptr<session::KernelGraph> &kernel_grap
(void)optimizer->Optimize(kernel_graph);
kernel_graph->SetExecOrderByDefault();
#ifdef ENABLE_DUMP_IR
if (save_graphs) {
if (context_ptr->CanDump(introductory)) {
std::string file_name = "hwopt_d_after_op_adaptation_graph_" + std::to_string(kernel_graph->graph_id()) + ".ir";
DumpIR(file_name, kernel_graph, true, kWholeStack);
}

View File

@ -42,10 +42,9 @@ void GeOptimization(const FuncGraphPtr &func_graph) {
MS_LOG(INFO) << "GE optimization start, graph: " << func_graph->ToString() << ".";
#ifdef ENABLE_DUMP_IR
auto context_ptr = MsContext::GetInstance();
MS_EXCEPTION_IF_NULL(context_ptr);
bool save_graphs = context_ptr->get_param<bool>(MS_CTX_SAVE_GRAPHS_FLAG);
if (save_graphs) {
auto context = MsContext::GetInstance();
MS_EXCEPTION_IF_NULL(context);
if (context->CanDump(advanced)) {
std::string file_name = "hwopt_d_before_ge_optimization_graph_" + func_graph->ToString() + ".ir";
DumpIR(file_name, func_graph);
}
@ -78,7 +77,7 @@ void GeOptimization(const FuncGraphPtr &func_graph) {
(void)optimizer->Optimize(func_graph);
#ifdef ENABLE_DUMP_IR
if (save_graphs) {
if (context->CanDump(advanced)) {
std::string file_name = "hwopt_d_after_ge_optimization_graph_" + func_graph->ToString() + ".ir";
DumpIR(file_name, func_graph);
}

View File

@ -150,9 +150,9 @@ void CPUKernelExecutor::OptimizeGraph(const FuncGraphPtr &graph) const {
// SetOperatorInfo may generate new node, so need set kernel object type again.
kernel_graph->SetKernelObjectTypesForUnrealNodes();
#ifdef ENABLE_DUMP_IR
const auto &ms_context = MsContext::GetInstance();
MS_EXCEPTION_IF_NULL(ms_context);
if (ms_context->get_param<bool>(MS_CTX_SAVE_GRAPHS_FLAG)) {
auto context = MsContext::GetInstance();
MS_EXCEPTION_IF_NULL(context);
if (context->CanDump(advanced)) {
DumpIR("hwopt_comm_after_kernel_select_" + graph->ToString() + ".ir", graph, true);
}
#endif

View File

@ -413,9 +413,8 @@ GraphId GPUSession::CompileGraphImpl(const KernelGraphPtr &graph) {
json_parser.Parse();
#endif
#ifdef ENABLE_DUMP_IR
bool save_graphs = context_ptr->get_param<bool>(MS_CTX_SAVE_GRAPHS_FLAG);
// Dump .pb graph before graph optimization
if (save_graphs) {
if (context_ptr->CanDump(advanced)) {
// Dump .pb graph before graph optimization
DumpIRProto(graph, "before_opt_" + std::to_string(graph->graph_id()));
}
#endif
@ -435,7 +434,7 @@ GraphId GPUSession::CompileGraphImpl(const KernelGraphPtr &graph) {
AssignStream(graph);
#ifdef ENABLE_DUMP_IR
// Dump .pb graph before remove nop nodes
if (save_graphs) {
if (context_ptr->CanDump(advanced)) {
DumpIRProto(graph, "before_removeNop_" + std::to_string(graph->graph_id()));
}
#endif
@ -453,7 +452,7 @@ GraphId GPUSession::CompileGraphImpl(const KernelGraphPtr &graph) {
#endif
// Dump .pb graph after graph optimization
#ifdef ENABLE_DUMP_IR
if (save_graphs) {
if (context_ptr->CanDump(advanced)) {
DumpIRProto(graph, "after_opt_" + std::to_string(graph->graph_id()));
}
#endif

View File

@ -105,8 +105,7 @@ void RegMsContext(py::module *m) {
.value("disable_format_transform", MsCtxParam::MS_CTX_DISABLE_FORMAT_TRANSFORM)
.value("memory_offload", MsCtxParam::MS_CTX_ENABLE_MEM_OFFLOAD)
.value("memory_optimize_level", MsCtxParam::MS_CTX_MEMORY_OPTIMIZE_LEVEL)
.value("op_timeout", MsCtxParam::MS_CTX_OP_TIMEOUT)
.value("save_graph_dot", MsCtxParam::MS_CTX_SAVE_GRAPH_DOT);
.value("op_timeout", MsCtxParam::MS_CTX_OP_TIMEOUT);
(void)py::class_<mindspore::MsContext, std::shared_ptr<mindspore::MsContext>>(*m, "MSContext")
.def_static("get_instance", &mindspore::MsContext::GetInstance, "Get ms context instance.")
.def("get_param", &mindspore::MsCtxGetParameter, "Get value of specified parameter.")

View File

@ -545,9 +545,10 @@ GraphId GraphCompiler::CompileGraphImpl(const KernelGraphPtr &graph, const Devic
MS_EXCEPTION_IF_NULL(session_);
#ifdef ENABLE_DUMP_IR
bool save_graphs = ms_context->get_param<bool>(MS_CTX_SAVE_GRAPHS_FLAG);
// Dump .pb graph before graph optimization.
if (save_graphs) {
auto context = MsContext::GetInstance();
MS_EXCEPTION_IF_NULL(context);
if (context->CanDump(advanced)) {
// Dump .pb graph before graph optimization.
DumpIRProto(graph, "before_opt_" + std::to_string(graph->graph_id()));
}
#endif
@ -577,7 +578,7 @@ GraphId GraphCompiler::CompileGraphImpl(const KernelGraphPtr &graph, const Devic
if (!run_in_pynative) {
OptimizeNopNode(graph.get());
#ifdef ENABLE_DUMP_IR
if (save_graphs) {
if (context->CanDump(advanced)) {
DumpIR("hwopt_comm_after_eliminate_nopnode_" + graph->ToString() + ".ir", graph, true);
}
#endif
@ -616,7 +617,7 @@ GraphId GraphCompiler::CompileGraphImpl(const KernelGraphPtr &graph, const Devic
SetSummaryNodesRefCount(graph.get());
#ifdef ENABLE_DUMP_IR
// Dump .pb graph after graph optimization.
if (save_graphs) {
if (context->CanDump(advanced)) {
DumpIRProto(graph, "after_opt_" + std::to_string(graph->graph_id()));
}
#endif

View File

@ -2579,10 +2579,9 @@ void GraphScheduler::PersistDeviceTensorForRootGraphControlNode(const GraphCompi
void GraphScheduler::DumpActor(const ActorSet *actor_set, const GraphCompilerInfo &graph_compiler_info) const {
MS_EXCEPTION_IF_NULL(actor_set);
const auto &context_ptr = MsContext::GetInstance();
MS_EXCEPTION_IF_NULL(context_ptr);
auto save_graphs = context_ptr->get_param<bool>(MS_CTX_SAVE_GRAPHS_FLAG);
if (!save_graphs) {
auto context = MsContext::GetInstance();
MS_EXCEPTION_IF_NULL(context);
if (!context->CanDump(advanced)) {
return;
}

View File

@ -65,10 +65,9 @@ std::string ActorSetOptimizer::GetPassFullName(const ActorSetPtr &actor_set, con
void ActorSetOptimizer::DumpPassActorSet(const ActorSetPtr &actor_set, const std::string &pass_full_name) const {
MS_EXCEPTION_IF_NULL(actor_set);
const auto &context_ptr = MsContext::GetInstance();
MS_EXCEPTION_IF_NULL(context_ptr);
auto save_graphs = context_ptr->get_param<bool>(MS_CTX_SAVE_GRAPHS_FLAG);
if (!save_graphs) {
auto context = MsContext::GetInstance();
MS_EXCEPTION_IF_NULL(context);
if (!context->CanDump(advanced)) {
return;
}

View File

@ -993,7 +993,9 @@ void DfGraphConvertor::ConvertWhileBody(const AnfNodePtr &node) {
converter.ConvertAllNode().BuildWhileSubGraph();
while_dfgraph_cache_[cur_while_node_]->push_back(*(converter.df_graph_));
std::string name = graph_node->ToString() + "_ge_graph.dot";
if (MsContext::GetInstance()->get_param<bool>(MS_CTX_SAVE_GRAPHS_FLAG)) {
auto context = MsContext::GetInstance();
MS_EXCEPTION_IF_NULL(context);
if (context->CanDump(fully)) {
converter.DrawComputeGraph(name);
}
return;
@ -1123,7 +1125,9 @@ void DfGraphConvertor::ConvertWhileCond(const AnfNodePtr &node) {
converter.ConvertAllNode().BuildWhileSubGraph();
while_dfgraph_cache_[cur_while_node_]->push_back(*(converter.df_graph_));
std::string name = func_graph->ToString() + "_ge_graph.dot";
if (MsContext::GetInstance()->get_param<bool>(MS_CTX_SAVE_GRAPHS_FLAG)) {
auto context = MsContext::GetInstance();
MS_EXCEPTION_IF_NULL(context);
if (context->CanDump(fully)) {
converter.DrawComputeGraph(name);
}
@ -1171,7 +1175,9 @@ void DfGraphConvertor::ConvertWhileAfter(const AnfNodePtr &node) {
converter.SetWhileOutputHandle(while_op);
converter.ConvertAllNode().BuildWhileAfterSubGraph();
std::string name = graph_node->ToString() + "_ge_graph.dot";
if (MsContext::GetInstance()->get_param<bool>(MS_CTX_SAVE_GRAPHS_FLAG)) {
auto context = MsContext::GetInstance();
MS_EXCEPTION_IF_NULL(context);
if (context->CanDump(fully)) {
converter.DrawComputeGraph(name);
}
MS_LOG(DEBUG) << "add while after graph " << converter.graph_const_inputs_.size()
@ -2328,7 +2334,9 @@ void DfGraphConvertor::ProcessSubgraph(const AnfNodePtr &node, const AnfNodePtr
(void)converter.ConvertAllNode().BuildGraph(anf_graph->ToString());
#ifdef ENABLE_DUMP_IR
std::string name = graph_node->ToString() + "_ge_graph.dot";
if (MsContext::GetInstance()->get_param<bool>(MS_CTX_SAVE_GRAPHS_FLAG)) {
auto context = MsContext::GetInstance();
MS_EXCEPTION_IF_NULL(context);
if (context->CanDump(fully)) {
converter.DrawComputeGraph(name);
}
#endif
@ -2631,8 +2639,7 @@ void DfGraphConvertor::TransDataType(const FuncGraphPtr &anf_graph) const {
#ifdef ENABLE_DUMP_IR
auto context_ptr = MsContext::GetInstance();
MS_EXCEPTION_IF_NULL(context_ptr);
bool save_graphs = context_ptr->get_param<bool>(MS_CTX_SAVE_GRAPHS_FLAG);
if (save_graphs) {
if (context_ptr->CanDump(advanced)) {
std::string file_name = "ge_trans_data_type_before_graph_" + anf_graph->ToString() + ".ir";
DumpIR(file_name, anf_graph);
}
@ -2648,7 +2655,7 @@ void DfGraphConvertor::TransDataType(const FuncGraphPtr &anf_graph) const {
}
}
#ifdef ENABLE_DUMP_IR
if (save_graphs) {
if (context_ptr->CanDump(advanced)) {
std::string file_name = "ge_trans_data_type_after_graph_" + anf_graph->ToString() + ".ir";
DumpIR(file_name, anf_graph);
}

View File

@ -45,12 +45,12 @@ std::shared_ptr<MsContext> MsContext::inst_context_ = nullptr;
MsContext::MsContext(const std::string &policy, const std::string &target) {
#ifndef ENABLE_SECURITY
set_param<bool>(MS_CTX_SAVE_GRAPHS_FLAG, false);
set_param<int>(MS_CTX_SAVE_GRAPHS_FLAG, 0);
set_param<std::string>(MS_CTX_SAVE_GRAPHS_PATH, ".");
set_param<std::string>(MS_CTX_COMPILE_CACHE_PATH, "");
#else
// Need set a default value for arrays even if running in the security mode.
bool_params_[MS_CTX_SAVE_GRAPHS_FLAG - MS_CTX_TYPE_BOOL_BEGIN] = false;
int_params_[MS_CTX_SAVE_GRAPHS_FLAG - MS_CTX_TYPE_INT_BEGIN] = 0;
string_params_[MS_CTX_SAVE_GRAPHS_PATH - MS_CTX_TYPE_STRING_BEGIN] = ".";
#endif
set_param<std::string>(MS_CTX_PYTHON_EXE_PATH, "python");
@ -110,7 +110,6 @@ MsContext::MsContext(const std::string &policy, const std::string &target) {
set_param<bool>(MS_CTX_ENABLE_RECOVERY, false);
set_param<bool>(MS_CTX_ENABLE_GE_HETEROGENOUS, false);
set_param<bool>(MS_CTX_DISABLE_FORMAT_TRANSFORM, false);
set_param<bool>(MS_CTX_SAVE_GRAPH_DOT, false);
set_param<int>(MS_CTX_MEMORY_OPTIMIZE_LEVEL, kOptimizeO0);
set_param<uint32_t>(MS_CTX_OP_TIMEOUT, kOpTimeout);
@ -357,4 +356,12 @@ void MsContext::CheckEnv(const std::string &device) {
}
}
}
bool MsContext::CanDump(const int &level) {
  const int save_graphs = get_param<int>(MS_CTX_SAVE_GRAPHS_FLAG);
  return save_graphs >= level;
}
} // namespace mindspore

View File

@ -36,6 +36,12 @@ enum MsBackendPolicy {
kMsBackendUnknown = 5,
};
enum level {
introductory = 1,
advanced = 2,
fully = 3,
};
const int kGraphMode = 0;
const int kPynativeMode = 1;
const char kDeviceUnDefined[] = "DeviceUnDefined";
@ -78,7 +84,6 @@ enum MsCtxParam : unsigned {
MS_CTX_IS_PYNATIVE_GE_INIT,
MS_CTX_PRECOMPILE_ONLY,
MS_CTX_ENABLE_PROFILING,
MS_CTX_SAVE_GRAPHS_FLAG,
MS_CTX_ENABLE_PARALLEL_SPLIT,
MS_CTX_ENABLE_INFER_OPT,
MS_CTX_GRAD_FOR_SCALAR,
@ -89,13 +94,13 @@ enum MsCtxParam : unsigned {
MS_CTX_ENABLE_RECOVERY,
MS_CTX_ENABLE_GE_HETEROGENOUS,
MS_CTX_DISABLE_FORMAT_TRANSFORM,
MS_CTX_SAVE_GRAPH_DOT,
MS_CTX_TYPE_BOOL_END,
// parameter of type int
MS_CTX_TYPE_INT_BEGIN = MS_CTX_TYPE_BOOL_END,
MS_CTX_EXECUTION_MODE = MS_CTX_TYPE_INT_BEGIN,
MS_CTX_MEMORY_OPTIMIZE_LEVEL,
MS_CTX_SAVE_GRAPHS_FLAG,
MS_CTX_TYPE_INT_END,
// parameter of type uint32
@ -156,6 +161,7 @@ class MS_CORE_API MsContext {
void Refresh();
bool enable_dump_ir() const;
bool CanDump(const int &level);
std::string backend_policy() const;
bool set_backend_policy(const std::string &policy);
// _comm_helper.py will try to dlopen libhccl.so, and minddata will try to dlopen libdvpp_utils.so. if load ascend
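
The threshold semantics of CanDump pair with this enum: any configured level also enables every level below it. A self-contained sketch of that behavior (MockContext is illustrative only, standing in for MsContext):

#include <cassert>

enum level { introductory = 1, advanced = 2, fully = 3 };

// Hypothetical stand-in for MsContext, reduced to the one field CanDump reads.
struct MockContext {
  int save_graphs;  // set via context.set_context(save_graphs=0..3)
  bool CanDump(const int &lvl) const { return save_graphs >= lvl; }
};

int main() {
  MockContext ctx{2};                 // save_graphs=2
  assert(ctx.CanDump(introductory));  // 2 >= 1: common IR files are dumped
  assert(ctx.CanDump(advanced));      // 2 >= 2: detailed IR files are dumped
  assert(!ctx.CanDump(fully));        // 2 <  3: .dot and verbose files are skipped
  return 0;
}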

View File

@ -704,7 +704,7 @@ def _check_target_specific_cfgs(device, arg_key):
@args_unreset_check(device_id=int, variable_memory_max_size=str, max_device_memory=str, mempool_block_size=str)
@args_type_check(mode=int, precompile_only=bool, device_target=str, device_id=int, save_graphs=bool,
@args_type_check(mode=int, precompile_only=bool, device_target=str, device_id=int, save_graphs=(bool, int),
save_graphs_path=str, enable_dump=bool, auto_tune_mode=str,
save_dump_path=str, enable_reduce_precision=bool, variable_memory_max_size=str,
enable_auto_mixed_precision=bool, inter_op_parallel_num=int,
@ -712,7 +712,7 @@ def _check_target_specific_cfgs(device, arg_key):
max_device_memory=str, print_file_path=str, max_call_depth=int, env_config_path=str,
graph_kernel_flags=str, save_compile_cache=bool, runtime_num_threads=int, load_compile_cache=bool,
grad_for_scalar=bool, pynative_synchronize=bool, mempool_block_size=str, disable_format_transform=bool,
op_timeout=int, save_graph_dot=bool)
op_timeout=int)
def set_context(**kwargs):
"""
Set context for running environment.
@ -993,6 +993,13 @@ def set_context(**kwargs):
logger.warning(f"For 'context.set_context', '{key}' parameter is deprecated. "
"For details, please see the interface parameter API comments")
continue
if key == 'save_graphs':
    if value is True:
        value = 1
    elif value is False:
        value = 0
    if not 0 <= value <= 3:
        raise ValueError(f"The value of 'save_graphs' should be in range [0, 3], but got '{value}'.")
if not _check_target_specific_cfgs(device, key):
continue
if hasattr(ctx, key):

View File

@ -120,7 +120,7 @@ def test_sit_auto_mix_precision_model_o0():
dataset1.set_label_data_type(np.float16)
# graph mode
context.set_context(mode=context.GRAPH_MODE)
context.set_context(save_graphs=True, save_graphs_path='./test_amp_o0')
context.set_context(save_graphs=3, save_graphs_path='./test_amp_o0')
net = Net(3, 10)
net.to_float(dtype.float16)
opt = nn.Momentum(params=net.trainable_params(), learning_rate=0.001, momentum=0.0009)
@ -158,7 +158,7 @@ def test_sit_auto_mix_precision_model_o2():
fakedata_mode=FakeDataInitMode.OnesInit)
# graph mode
context.set_context(mode=context.GRAPH_MODE)
context.set_context(save_graphs=True, save_graphs_path='./test_amp_o2')
context.set_context(save_graphs=3, save_graphs_path='./test_amp_o2')
net = Net(3, 10)
opt = nn.Momentum(params=net.trainable_params(), learning_rate=0.001, momentum=0.0009)
loss = nn.SoftmaxCrossEntropyWithLogits(sparse=False)
@ -206,7 +206,7 @@ def test_sit_auto_mix_precision_model_o1():
fakedata_mode=FakeDataInitMode.OnesInit)
# graph mode
context.set_context(mode=context.GRAPH_MODE)
context.set_context(save_graphs=True, save_graphs_path='./test_amp_o1')
context.set_context(save_graphs=3, save_graphs_path='./test_amp_o1')
net = Net(3, 10)
opt = nn.Momentum(params=net.trainable_params(), learning_rate=0.001, momentum=0.0009)
loss = nn.SoftmaxCrossEntropyWithLogits(sparse=False)

View File

@ -80,7 +80,7 @@ def test_save_graphs1():
i = i + 2
return out
context.set_context(save_graphs=True, save_graphs_path="test_save_graphs1")
context.set_context(save_graphs=3, save_graphs_path="test_save_graphs1")
a = Tensor(np.array(1), mindspore.int32)
b = Tensor(np.array(3), mindspore.int32)
graph_forward_net = ForwardNet(max_cycles=10)
@ -90,7 +90,7 @@ def test_save_graphs1():
ir, dat, dot = count_ir_files("test_save_graphs1")
assert ir > 15
assert dat == 0
assert dot == 0
assert dot > 0
remove_path("./test_save_graphs1")
context.set_context(save_graphs=False)
@ -126,13 +126,13 @@ def test_save_graphs2():
forward_net = ForwardNet(max_cycles=4)
backward_net = BackwardNet(forward_net)
context.set_context(save_graphs=True, save_graphs_path="./test_save_graphs2/tmp")
context.set_context(save_graphs=3, save_graphs_path="./test_save_graphs2/tmp")
backward_net(a, b)
ir, dat, dot = count_ir_files("test_save_graphs2/tmp")
assert ir > 15
assert dat == 0
assert dot == 0
assert dot > 15
remove_path("./test_save_graphs2")
context.set_context(save_graphs=False)
@ -170,12 +170,12 @@ def test_save_graphs3():
graph_forward_net = ForwardNetNoAssign(max_cycles=4)
graph_backward_net = BackwardNet(graph_forward_net)
context.set_context(save_graphs=True, save_graphs_path="./test_save_graphs3/../test_save_graphs3")
context.set_context(save_graphs=3, save_graphs_path="./test_save_graphs3/../test_save_graphs3")
graph_backward_net(a, b)
ir, dat, dot = count_ir_files("test_save_graphs3")
assert ir > 15
assert dat == 0
assert dot == 0
assert dot > 15
remove_path("./test_save_graphs3")
context.set_context(save_graphs=False)

View File

@ -128,7 +128,7 @@ def auto_parallel_compile_net(mode, dev_num, net, strategy1=None, strategy2=None
class TestGlobalNormInserted:
def setup_method(self):
self.output_path = './graphs' + self.__str__()
context.set_context(save_graphs=True,
context.set_context(save_graphs=3,
save_graphs_path=self.output_path)
def teardown_method(self):

View File

@ -91,7 +91,7 @@ def auto_parallel_compile_net(mode, dev_num, net, strategy1=None, strategy2=None
class TestSharedParameterCast:
def setup_method(self):
self.output_path = './graphs' + self.__str__()
context.set_context(save_graphs=True,
context.set_context(save_graphs=3,
save_graphs_path=self.output_path)
def teardown_method(self):

View File

@ -429,7 +429,7 @@ def run_pipeline_split_function(pipeline_net, micro_batch_interleaved=1):
class TestPipelineSplitWithNoOptimizer:
def setup_method(self):
self.output_path = './graphs' + self.__str__()
context.set_context(save_graphs=True,
context.set_context(save_graphs=3,
save_graphs_path=self.output_path)
def teardown_method(self):

View File

@ -156,7 +156,7 @@ class BasicValidator:
def setup_method(self):
self.output_path = './graphs' + self.__str__()
context.set_context(save_graphs=True,
context.set_context(save_graphs=3,
save_graphs_path=self.output_path)
def teardown_method(self):

View File

@ -51,7 +51,7 @@ def test_jit():
return x + 1
context.set_context(mode=context.GRAPH_MODE)
context.set_context(save_graphs=True, save_graphs_path="ir_dump_path")
context.set_context(save_graphs=3, save_graphs_path="ir_dump_path")
input1 = np.random.randn(5, 5)
add(Tensor(input1, ms.float32))
result = find_files("./ir_dump_path/*validate*.ir", "test_debug_info.py:51/ return x + 1/")
@ -74,7 +74,7 @@ def test_cell_jit():
return x
context.set_context(mode=context.GRAPH_MODE)
context.set_context(save_graphs=True, save_graphs_path="ir_dump_path")
context.set_context(save_graphs=3, save_graphs_path="ir_dump_path")
input1 = np.random.randn(5, 5)
net = Net()
net(Tensor(input1, ms.float32))