add StringRecorder for RDR, record somas info:

1. add string recorder
2. record somas
3. remove SomasInfoRecorder
4. fix clang and cpplint
This commit is contained in:
luopengting 2021-01-20 16:08:32 +08:00
parent a636bc29ec
commit 66961f2f1d
9 changed files with 299 additions and 229 deletions

View File

@ -1,5 +1,5 @@
/** /**
* Copyright 2020 Huawei Technologies Co., Ltd * Copyright 2020-2021 Huawei Technologies Co., Ltd
* Licensed under the Apache License, Version 2.0 (the "License"); * Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License. * you may not use this file except in compliance with the License.
@ -34,6 +34,7 @@
#include "backend/optimizer/common/helper.h" #include "backend/optimizer/common/helper.h"
#include "utils/ms_context.h" #include "utils/ms_context.h"
#include "debug/common.h" #include "debug/common.h"
#include "debug/rdr/running_data_recorder.h"
#include "common/thread_pool.h" #include "common/thread_pool.h"
#include "profiler/device/common/memory_profiling.h" #include "profiler/device/common/memory_profiling.h"
@ -98,6 +99,15 @@ bool Somas::InitSomasTensors(const session::KernelGraph *graph) {
<< nodes_list_.size() << " nodes, " << tensors_list_.size() << " tensors, and " << nodes_list_.size() << " nodes, " << tensors_list_.size() << " tensors, and "
<< contiguous_tensors_list_.size() << " contiguous lists"; << contiguous_tensors_list_.size() << " contiguous lists";
#ifdef ENABLE_DUMP_IR
SubModuleId module = SubModuleId::SM_OPTIMIZER;
std::string tag = "somas";
std::string filename = "somas_pre_processed_info_" + std::to_string(graph->graph_id()) + ".ir";
mindspore::RDR::RecordString(module, tag, SomasInfo(), filename);
filename = "somas_offline_log_" + std::to_string(graph->graph_id()) + ".ir";
mindspore::RDR::RecordString(module, tag, Offline(), filename);
#endif
if (save_graphs_) { if (save_graphs_) {
std::string file_path = std::string file_path =
save_graphs_path_ + "/" + "somas_pre_processed_info_" + std::to_string(graph->graph_id()) + ".ir"; save_graphs_path_ + "/" + "somas_pre_processed_info_" + std::to_string(graph->graph_id()) + ".ir";
@ -380,6 +390,14 @@ void Somas::InitBasicInfo(const session::KernelGraph *graph) {
auto context_ptr = MsContext::GetInstance(); auto context_ptr = MsContext::GetInstance();
MS_EXCEPTION_IF_NULL(context_ptr); MS_EXCEPTION_IF_NULL(context_ptr);
#ifdef ENABLE_DUMP_IR
SubModuleId module = SubModuleId::SM_OPTIMIZER;
std::string tag = "somas";
std::string filename = "somas_initial_info_" + std::to_string(graph->graph_id()) + ".ir";
mindspore::RDR::RecordString(module, tag, SomasInfo(), filename);
#endif
save_graphs_ = context_ptr->get_param<bool>(MS_CTX_SAVE_GRAPHS_FLAG); save_graphs_ = context_ptr->get_param<bool>(MS_CTX_SAVE_GRAPHS_FLAG);
save_graphs_path_ = context_ptr->get_param<std::string>(MS_CTX_SAVE_GRAPHS_PATH); save_graphs_path_ = context_ptr->get_param<std::string>(MS_CTX_SAVE_GRAPHS_PATH);
if (save_graphs_path_.empty()) { if (save_graphs_path_.empty()) {
@ -1089,6 +1107,121 @@ std::string Somas::GetSplitName(const std::string &scope_name) const {
} }
} }
std::string Somas::SomasInfo() {
std::ostringstream oss;
DumpParameters(oss);
DumpTensors(oss);
DumpNodes(oss);
oss << "\n\nAll Stream Groups:\n\n";
for (const auto &stream_group : streams_groups_) {
for (const auto &stream : stream_group) {
oss << "stm" << stream << " ";
}
oss << "\n";
}
if (!ref_node_constraints_.empty()) {
oss << "\n\nAll Ref Node Info:\n\n";
for (const auto &ref_in_out : ref_node_constraints_) {
oss << "refnode input-output:";
for (const auto &item : ref_in_out) {
oss << "%" << item << "T ";
}
oss << "\n";
}
}
return oss.str();
}
// Append a per-node summary (inputs, outputs, workspace tensors, stream id)
// to `oss`, one line per node in nodes_list_.
void Somas::DumpNodes(std::ostringstream &oss) const {
  oss << "\n\nAll Nodes:\n\n";
  for (const auto &node : nodes_list_) {
    const std::string split_name = GetSplitName(node->scope_full_name_);
    oss << "$" << node->GetId() << "\t" << split_name << "\t" << static_cast<int>(node->GetType()) << "\t";
    const size_t total_inputs = node->input_tensors_.size() + node->input_parameters_map_.size();
    oss << "inputs[";
    size_t next_tensor = 0;
    for (size_t pos = 0; pos < total_inputs; ++pos) {
      // Each input slot is either a graph parameter (looked up by position in
      // input_parameters_map_) or the next tensor in declaration order.
      auto param_iter = node->input_parameters_map_.find(pos);
      if (param_iter != node->input_parameters_map_.end()) {
        oss << "%" << param_iter->second->id_ << "P"
            << ", ";
      } else {
        oss << "%" << node->input_tensors_[next_tensor]->GetId() << "T"
            << ", ";
        ++next_tensor;
      }
    }
    oss << "]";
    oss << "\toutputs[";
    for (const auto &output : node->output_tensors_) {
      oss << "%" << output->GetId() << "T"
          << ", ";
    }
    oss << "]";
    oss << "\tworkspace[";
    for (const auto &workspace : node->workspace_tensors_) {
      oss << "%" << workspace->GetId() << "T"
          << ", ";
    }
    oss << "]";
    oss << "\tstreamID["
        << "@" << node->GetStream()->GetId() << "]\n";
  }
}
// Append a tab-separated table of every tensor (sizes, offset, address,
// type, lifetime and source node) to `oss`.
void Somas::DumpTensors(std::ostringstream &oss) const {
  oss << "\n\nAll Tensors:\n\n";
  oss << "index:"
      << "\tsize:"
      << "\treal_size:"
      << "\toffset:"
      << "\taddr:"
      << "\ttype:"
      << "\tlifelong:"
      << "\tlife_start:"
      << "\tlife_end:"
      << "\tsource node name:\n";
  for (const auto &tensor : tensors_list_) {
    const std::string split_name = GetSplitName(tensor->GetSourceNode()->scope_full_name_);
    // One row per tensor; field order matches the header above.
    oss << "%" << tensor->GetId() << "T"
        << "\t";
    oss << "#" << tensor->GetAlignedSize() << "S"
        << "\t";
    oss << "#" << tensor->GetOriginalSize() << "S"
        << "\t";
    oss << "&" << tensor->GetOffset() << ""
        << "\t";
    // Absolute address = static offset + base address of the memory pool.
    oss << "&" << static_cast<void *>(tensor->GetOffset() + mem_base_addr_) << "\t";
    oss << tensor_type_name_map[tensor->type_] << "\t" << tensor->IsLifelong() << "\t" << tensor->lifetime_.start_;
    oss << "\t" << tensor->lifetime_.end_ << "\t" << split_name << "\n";
  }
}
// Append a tab-separated table of every graph parameter (size, address,
// source node and output index) to `oss`.
void Somas::DumpParameters(std::ostringstream &oss) const {
  oss << "All Parameters:\n\n";
  oss << "index:"
      << "\tsize:"
      << "\tstart_addr:"
      << "\tsource node name:"
      << "\tnode out index:\n";
  for (const auto &parameter : parameters_list_) {
    // One row per parameter; field order matches the header above.
    oss << "%" << parameter->id_ << "P"
        << "\t";
    oss << "#" << parameter->size_ << "S"
        << "\t";
    oss << "&" << parameter->addr_ << "\t" << parameter->source_node_->fullname_with_scope() << "\t"
        << parameter->output_index_ << "\n";
  }
}
void Somas::DumpSomasInfoIR(const string filename) { void Somas::DumpSomasInfoIR(const string filename) {
if (filename.size() > PATH_MAX) { if (filename.size() > PATH_MAX) {
MS_LOG(ERROR) << "File path " << filename << " is too long."; MS_LOG(ERROR) << "File path " << filename << " is too long.";
@ -1108,115 +1241,52 @@ void Somas::DumpSomasInfoIR(const string filename) {
return; return;
} }
DumpParameters(ofs); ofs << SomasInfo();
DumpTensors(ofs); ofs.close();
DumpNodes(ofs);
ofs << "\n\nAll Stream Groups:\n\n";
for (const auto &stream_group : streams_groups_) {
for (const auto &stream : stream_group) {
ofs << "stm" << stream << " ";
}
ofs << "\n";
}
if (!ref_node_constraints_.empty()) {
ofs << "\n\nAll Ref Node Info:\n\n";
for (const auto &ref_in_out : ref_node_constraints_) {
ofs << "refnode input-output:";
for (const auto &item : ref_in_out) {
ofs << "%" << item << "T ";
}
ofs << "\n";
}
}
} }
void Somas::DumpNodes(std::ofstream &ofs) const { std::string Somas::Offline() {
ofs << "\n\nAll Nodes:\n\n"; std::ostringstream oss;
for (const auto &node : nodes_list_) {
auto scope_name = node->scope_full_name_; for (auto tensor : tensors_list_) {
std::string split_name = GetSplitName(scope_name); if (tensor->IsOutputOnly() || tensor->type_ == TensorType::kRefNodeOutput) {
ofs << "$" << node->GetId() << "\t" << split_name << "\t" << static_cast<int>(node->GetType()) << "\t"; oss << "Somas EDGE ERROR src=n" << tensor->GetSourceNode()->GetId()
auto input_num = node->input_tensors_.size() + node->input_parameters_map_.size(); << ", srcstm=" << tensor->GetSourceStream()->GetId() << ", dst=nc"
ofs << "inputs["; << ", dststm=nc"
size_t tensor_index = 0; << ", workspace=0, size=" << tensor->GetOriginalSize()
for (size_t input_index = 0; input_index < input_num; input_index++) { << ", lifelong=" << static_cast<int>(tensor->lifelong_value_) << ", tid=" << tensor->GetId()
auto iter = node->input_parameters_map_.find(input_index); << ", start=" << tensor->lifetime_.start_ << ", end=" << tensor->lifetime_.end_ << std::endl;
if (iter != node->input_parameters_map_.end()) { } else {
ofs << "%" << iter->second->id_ << "P" std::map<size_t, size_t> dest_infos;
<< ", "; for (SomasNodePtr dest_node : tensor->destinations_) {
} else { dest_infos.insert(std::make_pair(dest_node->GetId(), dest_node->GetStream()->GetId()));
ofs << "%" << node->input_tensors_[tensor_index]->GetId() << "T" }
<< ", ";
tensor_index++; for (auto dest_info : dest_infos) {
oss << "Somas EDGE src=n" << tensor->GetSourceNode()->GetId()
<< ", srcstm=" << tensor->GetSourceStream()->GetId() << ", dst=n" << dest_info.first
<< ", dststm=" << dest_info.second << ", workspace=" << static_cast<int>(tensor->type_ == kWorkspace)
<< ", size=" << tensor->GetOriginalSize() << ", lifelong=" << static_cast<int>(tensor->lifelong_value_)
<< ", tid=" << tensor->GetId() << ", start=" << tensor->lifetime_.start_
<< ", end=" << tensor->lifetime_.end_ << std::endl;
} }
} }
}
ofs << "]"; for (vector<size_t> tList : contiguous_tensors_list_) {
ofs << "\toutputs["; oss << "Somas CONTIGUOUS";
for (const auto &out : node->output_tensors_) { for (size_t tid : tList) {
ofs << "%" << out->GetId() << "T" oss << " " << tid;
<< ", ";
} }
ofs << "]"; oss << std::endl;
ofs << "\tworkspace["; }
for (const auto &wk : node->workspace_tensors_) { for (const auto &group : streams_groups_) {
ofs << "%" << wk->GetId() << "T" oss << "Somas GROUP";
<< ", "; for (int64_t sid : group) {
oss << " " << sid;
} }
ofs << "]"; oss << std::endl;
ofs << "\tstreamID["
<< "@" << node->GetStream()->GetId() << "]\n";
}
}
void Somas::DumpTensors(std::ofstream &ofs) const {
ofs << "\n\nAll Tensors:\n\n";
ofs << "index:"
<< "\tsize:"
<< "\treal_size:"
<< "\toffset:"
<< "\taddr:"
<< "\ttype:"
<< "\tlifelong:"
<< "\tlife_start:"
<< "\tlife_end:"
<< "\tsource node name:\n";
for (const auto &tensor : tensors_list_) {
auto scope_name = tensor->GetSourceNode()->scope_full_name_;
std::string split_name = GetSplitName(scope_name);
ofs << "%" << tensor->GetId() << "T"
<< "\t"
<< "#" << tensor->GetAlignedSize() << "S"
<< "\t"
<< "#" << tensor->GetOriginalSize() << "S"
<< "\t"
<< "&" << tensor->GetOffset() << ""
<< "\t"
<< "&" << static_cast<void *>(tensor->GetOffset() + mem_base_addr_) << "\t"
<< tensor_type_name_map[tensor->type_] << "\t" << tensor->IsLifelong() << "\t" << tensor->lifetime_.start_
<< "\t" << tensor->lifetime_.end_ << "\t" << split_name << "\n";
}
}
void Somas::DumpParameters(std::ofstream &ofs) const {
ofs << "All Parameters:\n\n";
ofs << "index:"
<< "\tsize:"
<< "\tstart_addr:"
<< "\tsource node name:"
<< "\tnode out index:\n";
for (const auto &param : parameters_list_) {
ofs << "%" << param->id_ << "P"
<< "\t"
<< "#" << param->size_ << "S"
<< "\t"
<< "&" << param->addr_ << "\t" << param->source_node_->fullname_with_scope() << "\t" << param->output_index_
<< "\n";
} }
return oss.str();
} }
void Somas::DumpOfflineIR(const string filename) { void Somas::DumpOfflineIR(const string filename) {
@ -1240,66 +1310,12 @@ void Somas::DumpOfflineIR(const string filename) {
return; return;
} }
for (auto tensor : tensors_list_) { ofs << Offline();
if (tensor->IsOutputOnly() || tensor->type_ == TensorType::kRefNodeOutput) {
ofs << "Somas EDGE ERROR src=n" << tensor->GetSourceNode()->GetId()
<< ", srcstm=" << tensor->GetSourceStream()->GetId() << ", dst=nc"
<< ", dststm=nc"
<< ", workspace=0, size=" << tensor->GetOriginalSize()
<< ", lifelong=" << static_cast<int>(tensor->lifelong_value_) << ", tid=" << tensor->GetId()
<< ", start=" << tensor->lifetime_.start_ << ", end=" << tensor->lifetime_.end_ << std::endl;
} else {
std::map<size_t, size_t> dest_infos;
for (SomasNodePtr dest_node : tensor->destinations_) {
dest_infos.insert(std::make_pair(dest_node->GetId(), dest_node->GetStream()->GetId()));
}
for (auto dest_info : dest_infos) {
ofs << "Somas EDGE src=n" << tensor->GetSourceNode()->GetId()
<< ", srcstm=" << tensor->GetSourceStream()->GetId() << ", dst=n" << dest_info.first
<< ", dststm=" << dest_info.second << ", workspace=" << static_cast<int>(tensor->type_ == kWorkspace)
<< ", size=" << tensor->GetOriginalSize() << ", lifelong=" << static_cast<int>(tensor->lifelong_value_)
<< ", tid=" << tensor->GetId() << ", start=" << tensor->lifetime_.start_
<< ", end=" << tensor->lifetime_.end_ << std::endl;
}
}
}
for (vector<size_t> tList : contiguous_tensors_list_) {
ofs << "Somas CONTIGUOUS";
for (size_t tid : tList) {
ofs << " " << tid;
}
ofs << std::endl;
}
for (const auto &group : streams_groups_) {
ofs << "Somas GROUP";
for (int64_t sid : group) {
ofs << " " << sid;
}
ofs << std::endl;
}
ofs.close(); ofs.close();
} }
void Somas::DumpSomasMemoryIR(const string filename) { std::string Somas::SomasMemory() {
if (filename.size() > PATH_MAX) { std::ostringstream oss;
MS_LOG(ERROR) << "File path " << filename << " is too long.";
return;
}
auto real_path = Common::GetRealPath(filename);
if (!real_path.has_value()) {
MS_LOG(ERROR) << "Get real path failed. path=" << filename;
return;
}
ChangeFileMode(real_path.value(), S_IRWXU);
std::ofstream ofs(real_path.value());
if (!ofs.is_open()) {
MS_LOG(ERROR) << "Open dump file '" << real_path.value() << "' failed!";
return;
}
std::map<size_t, size_t> mem_map; std::map<size_t, size_t> mem_map;
for (auto tensor : tensors_list_) { for (auto tensor : tensors_list_) {
@ -1325,7 +1341,7 @@ void Somas::DumpSomasMemoryIR(const string filename) {
} }
} }
ofs << "mem_id:" oss << "mem_id:"
<< "\tstart_offset:" << "\tstart_offset:"
<< "\tend_offset:" << "\tend_offset:"
<< "\ttensor_id:" << "\ttensor_id:"
@ -1353,7 +1369,7 @@ void Somas::DumpSomasMemoryIR(const string filename) {
} }
std::string split_name = GetSplitName(scope_name); std::string split_name = GetSplitName(scope_name);
ofs << "#" << mem_map[tensor->GetOffset()] << "\t" << tensor->GetOffset() << "\t" oss << "#" << mem_map[tensor->GetOffset()] << "\t" << tensor->GetOffset() << "\t"
<< tensor->GetOffset() + tensor->GetAlignedSize() << "\t%" << tensor->GetId() << "T\t" << tensor->GetOffset() + tensor->GetAlignedSize() << "\t%" << tensor->GetId() << "T\t"
<< tensor->GetOriginalSize() << "\t" << tensor->GetAlignedSize() << "\t&" << tensor->GetOriginalSize() << "\t" << tensor->GetAlignedSize() << "\t&"
<< static_cast<void *>(tensor->GetOffset() + mem_base_addr_) << "\t&" << static_cast<void *>(tensor->GetOffset() + mem_base_addr_) << "\t&"
@ -1362,6 +1378,31 @@ void Somas::DumpSomasMemoryIR(const string filename) {
<< tensor->lifetime_.start_ << "\t" << tensor->lifetime_.end_ << "\n"; << tensor->lifetime_.start_ << "\t" << tensor->lifetime_.end_ << "\n";
} }
} }
return oss.str();
}
void Somas::DumpSomasMemoryIR(const string filename) {
if (filename.size() > PATH_MAX) {
MS_LOG(ERROR) << "File path " << filename << " is too long.";
return;
}
auto real_path = Common::GetRealPath(filename);
if (!real_path.has_value()) {
MS_LOG(ERROR) << "Get real path failed. path=" << filename;
return;
}
ChangeFileMode(real_path.value(), S_IRWXU);
std::ofstream ofs(real_path.value());
if (!ofs.is_open()) {
MS_LOG(ERROR) << "Open dump file '" << real_path.value() << "' failed!";
return;
}
ofs << SomasMemory();
ofs.close();
} }
size_t Somas::CalcLowerBound() const { size_t Somas::CalcLowerBound() const {

View File

@ -1,5 +1,5 @@
/** /**
* Copyright 2020 Huawei Technologies Co., Ltd * Copyright 2020-2021 Huawei Technologies Co., Ltd
* Licensed under the Apache License, Version 2.0 (the "License"); * Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License. * you may not use this file except in compliance with the License.
@ -49,6 +49,8 @@ class Somas {
uint8_t *GetNodeOutputPtr(const AnfNodePtr &node, size_t index) const; uint8_t *GetNodeOutputPtr(const AnfNodePtr &node, size_t index) const;
uint8_t *GetNodeWorkSpacePtr(const AnfNodePtr &node, size_t index) const; uint8_t *GetNodeWorkSpacePtr(const AnfNodePtr &node, size_t index) const;
std::string SomasInfo();
std::string SomasMemory();
void DumpSomasInfoIR(const string filename); void DumpSomasInfoIR(const string filename);
void DumpSomasMemoryIR(const string filename); void DumpSomasMemoryIR(const string filename);
@ -121,8 +123,8 @@ class Somas {
bool Assign(const session::KernelGraph *graph); bool Assign(const session::KernelGraph *graph);
std::string Offline();
void DumpOfflineIR(const string filename); void DumpOfflineIR(const string filename);
void DumpSomasMemoryPoolInfoIR(const string filename);
std::string GetSplitName(const string &scope_name) const; std::string GetSplitName(const string &scope_name) const;
size_t CalcLowerBound() const; size_t CalcLowerBound() const;
void GenGraphStatisticInfo(); void GenGraphStatisticInfo();
@ -143,9 +145,9 @@ class Somas {
void UpdateRefOverlapTensorsConflicts(); void UpdateRefOverlapTensorsConflicts();
void UpdateRefTensorsOffset(); void UpdateRefTensorsOffset();
void UpdateContiguousTensorsOffset(const std::map<size_t, size_t> &contiguous_ref_list_map); void UpdateContiguousTensorsOffset(const std::map<size_t, size_t> &contiguous_ref_list_map);
void DumpParameters(std::ofstream &ofs) const; void DumpParameters(std::ostringstream &oss) const;
void DumpTensors(std::ofstream &ofs) const; void DumpTensors(std::ostringstream &oss) const;
void DumpNodes(std::ofstream &ofs) const; void DumpNodes(std::ostringstream &oss) const;
std::map<size_t, size_t> GetContiguousListContainRefTensor(); std::map<size_t, size_t> GetContiguousListContainRefTensor();
std::map<size_t, size_t> GetRefTensorsInContiguousList(); std::map<size_t, size_t> GetRefTensorsInContiguousList();
}; };

View File

@ -8,7 +8,7 @@ set(_DEBUG_SRC_LIST
"${CMAKE_CURRENT_SOURCE_DIR}/env_config_parser.cc" "${CMAKE_CURRENT_SOURCE_DIR}/env_config_parser.cc"
"${CMAKE_CURRENT_SOURCE_DIR}/rdr/graph_exec_order_recorder.cc" "${CMAKE_CURRENT_SOURCE_DIR}/rdr/graph_exec_order_recorder.cc"
"${CMAKE_CURRENT_SOURCE_DIR}/rdr/graph_recorder.cc" "${CMAKE_CURRENT_SOURCE_DIR}/rdr/graph_recorder.cc"
"${CMAKE_CURRENT_SOURCE_DIR}/rdr/somas_recorder.cc" "${CMAKE_CURRENT_SOURCE_DIR}/rdr/string_recorder.cc"
"${CMAKE_CURRENT_SOURCE_DIR}/rdr/recorder_manager.cc" "${CMAKE_CURRENT_SOURCE_DIR}/rdr/recorder_manager.cc"
"${CMAKE_CURRENT_SOURCE_DIR}/rdr/running_data_recorder.cc" "${CMAKE_CURRENT_SOURCE_DIR}/rdr/running_data_recorder.cc"
) )

View File

@ -16,9 +16,9 @@
#include "debug/rdr/running_data_recorder.h" #include "debug/rdr/running_data_recorder.h"
#include <utility> #include <utility>
#include "debug/rdr/graph_recorder.h" #include "debug/rdr/graph_recorder.h"
#include "debug/rdr/somas_recorder.h"
#include "debug/rdr/graph_exec_order_recorder.h" #include "debug/rdr/graph_exec_order_recorder.h"
#include "debug/rdr/recorder_manager.h" #include "debug/rdr/recorder_manager.h"
#include "debug/rdr/string_recorder.h"
#include "mindspore/core/ir/func_graph.h" #include "mindspore/core/ir/func_graph.h"
#include "mindspore/core/ir/anf.h" #include "mindspore/core/ir/anf.h"
@ -75,15 +75,16 @@ bool RecordGraphExecOrder(const SubModuleId module, const std::string &tag,
return ans; return ans;
} }
bool RecordSomasInfo(const SubModuleId module, const std::string &tag, const SomasPtr &somas_ptr, int graph_id) { bool RecordString(SubModuleId module, const std::string &tag, const std::string &data, const std::string &filename) {
std::string submodule_name = std::string(GetSubModuleName(module)); std::string submodule_name = std::string(GetSubModuleName(module));
SomasRecorderPtr somas_recorder = std::make_shared<SomasRecorder>(submodule_name, tag, somas_ptr, graph_id); StringRecorderPtr string_recorder = std::make_shared<StringRecorder>(submodule_name, tag, data, filename);
somas_recorder->GenString(); string_recorder->SetFilename(filename);
bool ans = mindspore::RecorderManager::Instance().RecordObject(std::move(somas_recorder)); bool ans = mindspore::RecorderManager::Instance().RecordObject(std::move(string_recorder));
return ans; return ans;
} }
void TriggerAll() { mindspore::RecorderManager::Instance().TriggerAll(); } void TriggerAll() { mindspore::RecorderManager::Instance().TriggerAll(); }
#else #else
bool RecordAnfGraph(const SubModuleId module, const std::string &tag, const FuncGraphPtr &graph, bool RecordAnfGraph(const SubModuleId module, const std::string &tag, const FuncGraphPtr &graph,
const std::string &file_type, int graph_id) { const std::string &file_type, int graph_id) {
@ -107,7 +108,7 @@ bool RecordGraphExecOrder(const SubModuleId module, const std::string &tag, std:
return false; return false;
} }
bool RecordSomasInfo(const SubModuleId module, const std::string &tag, const SomasPtr &somas_ptr, int graph_id) { bool RecordString(SubModuleId module, const std::string &tag, const std::string &data, const std::string &filename) {
static bool already_printed = false; static bool already_printed = false;
if (already_printed) { if (already_printed) {
return false; return false;
@ -116,6 +117,7 @@ bool RecordSomasInfo(const SubModuleId module, const std::string &tag, const Som
MS_LOG(WARNING) << "The RDR presently only support linux os."; MS_LOG(WARNING) << "The RDR presently only support linux os.";
return false; return false;
} }
void TriggerAll() { void TriggerAll() {
static bool already_printed = false; static bool already_printed = false;
if (already_printed) { if (already_printed) {

View File

@ -13,28 +13,27 @@
* See the License for the specific language governing permissions and * See the License for the specific language governing permissions and
* limitations under the License. * limitations under the License.
*/ */
#ifndef RUNNING_DATA_RECORDER_H_ #ifndef MINDSPORE_CCSRC_DEBUG_RDR_RUNNING_DATA_RECORDER_H_
#define RUNNING_DATA_RECORDER_H_ #define MINDSPORE_CCSRC_DEBUG_RDR_RUNNING_DATA_RECORDER_H_
#include <vector> #include <vector>
#include <string> #include <string>
#include <memory> #include <memory>
#include "backend/optimizer/somas/somas.h"
#include "mindspore/core/utils/log_adapter.h" #include "mindspore/core/utils/log_adapter.h"
namespace mindspore { namespace mindspore {
class FuncGraph; class FuncGraph;
class CNode; class CNode;
using FuncGraphPtr = std::shared_ptr<FuncGraph>; using FuncGraphPtr = std::shared_ptr<FuncGraph>;
using CNodePtr = std::shared_ptr<CNode>; using CNodePtr = std::shared_ptr<CNode>;
using SomasPtr = std::shared_ptr<somas::Somas>;
namespace RDR { namespace RDR {
bool RecordAnfGraph(const SubModuleId module, const std::string &tag, const FuncGraphPtr &graph, bool RecordAnfGraph(const SubModuleId module, const std::string &tag, const FuncGraphPtr &graph,
const std::string &file_type = ".ir;.pb;.dat", int graph_id = 0); const std::string &file_type = ".ir;.pb;.dat", int graph_id = 0);
bool RecordGraphExecOrder(const SubModuleId module, const std::string &tag, bool RecordGraphExecOrder(const SubModuleId module, const std::string &tag,
const std::vector<CNodePtr> &&final_exec_order); const std::vector<CNodePtr> &&final_exec_order);
bool RecordSomasInfo(const SubModuleId module, const std::string &tag, const SomasPtr &somas_ptr, int graph_id); bool RecordString(SubModuleId module, const std::string &tag, const std::string &data,
const std::string &filename = "");
void TriggerAll(); void TriggerAll();
} // namespace RDR } // namespace RDR
} // namespace mindspore } // namespace mindspore
#endif // RUNNING_DATA_RECORDER_H_ #endif // MINDSPORE_CCSRC_DEBUG_RDR_RUNNING_DATA_RECORDER_H_

View File

@ -1,30 +0,0 @@
/**
* Copyright 2021 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "debug/rdr/somas_recorder.h"
#include "backend/optimizer/somas/somas.h"
namespace mindspore {
void SomasRecorder::Export() {
if (filename_.empty()) {
filename_ = directory_ + module_ + "_" + tag_;
}
std::string filename = filename_ + "_somas_after_allocate_" + std::to_string(graph_id_) + "_" + timestamp_ + ".ir";
somas_reuse_util_ptr_->DumpSomasInfoIR(filename);
std::string mem_filename = filename_ + "_somas_mem_info_" + std::to_string(graph_id_) + "_" + timestamp_ + ".ir";
somas_reuse_util_ptr_->DumpSomasMemoryIR(mem_filename);
}
bool SomasRecorder::GenString() { return true; }
} // namespace mindspore

View File

@ -0,0 +1,51 @@
/**
* Copyright 2021 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "debug/rdr/string_recorder.h"
#include <sys/stat.h>
#include <fstream>
#include "debug/common.h"
#include "utils/utils.h"
#include "mindspore/core/utils/log_adapter.h"
namespace mindspore {
// Append `data_` to <directory_>/<filename_>. A default filename of
// "<module>_<tag>_<timestamp>.txt" is generated when none was set.
// Logs and returns on path-resolution or open failure; on success the
// file is left readable only by its owner.
void StringRecorder::Export() {
  // Guard the empty case first: std::string::back() on an empty string is
  // undefined behavior. An empty directory_ leaves the path cwd-relative.
  if (!directory_.empty() && directory_.back() != '/') {
    directory_ += "/";
  }
  if (filename_.empty()) {
    filename_ = module_ + "_" + tag_ + "_" + timestamp_ + ".txt";
  }
  std::string file_path = directory_ + filename_;
  auto realpath = Common::GetRealPath(file_path);
  if (!realpath.has_value()) {
    MS_LOG(ERROR) << "Get real path failed. path=" << file_path;
    return;
  }
  // Owner read/write/execute so the (possibly pre-existing, read-only)
  // file can be appended to.
  ChangeFileMode(realpath.value(), S_IRWXU);
  std::ofstream fout(realpath.value(), std::ofstream::app);
  if (!fout.is_open()) {
    MS_LOG(WARNING) << "Open file for saving string failed.";
    return;
  }
  fout << data_;
  fout.close();
  // set file mode to read only by user
  ChangeFileMode(realpath.value(), S_IRUSR);
}
} // namespace mindspore

View File

@ -13,34 +13,28 @@
* See the License for the specific language governing permissions and * See the License for the specific language governing permissions and
* limitations under the License. * limitations under the License.
*/ */
#ifndef SOMAS_RECORDER_H_ #ifndef MINDSPORE_CCSRC_DEBUG_RDR_STRING_RECORDER_H_
#define SOMAS_RECORDER_H_ #define MINDSPORE_CCSRC_DEBUG_RDR_STRING_RECORDER_H_
#include <string> #include <string>
#include <sstream>
#include <memory> #include <memory>
#include "debug/rdr/base_recorder.h"
#include "debug/rdr/base_recorder.h"
namespace mindspore { namespace mindspore {
namespace somas { class StringRecorder : public BaseRecorder {
class Somas;
using SomasPtr = std::shared_ptr<Somas>;
} // namespace somas
class SomasRecorder : public BaseRecorder {
public: public:
SomasRecorder() : BaseRecorder(), somas_reuse_util_ptr_(nullptr) {} StringRecorder() : BaseRecorder() {}
SomasRecorder(const std::string &module, const std::string &tag, const somas::SomasPtr &somas_reuse_util_ptr, StringRecorder(const std::string &module, const std::string &tag, const std::string &data,
int graph_id) const std::string &file_type)
: BaseRecorder(module, tag), somas_reuse_util_ptr_(somas_reuse_util_ptr), graph_id_(graph_id) {} : BaseRecorder(module, tag), data_(data) {}
~StringRecorder() {}
void SetModule(const std::string &module) { module_ = module; } void SetModule(const std::string &module) { module_ = module; }
void SetSomas(const somas::SomasPtr &somas_reuse_util_ptr) { somas_reuse_util_ptr_ = somas_reuse_util_ptr; } void SetFilename(const std::string &filename) { filename_ = filename; }
bool GenString();
virtual void Export(); virtual void Export();
private: private:
somas::SomasPtr somas_reuse_util_ptr_; std::string data_;
int graph_id_{0};
}; };
using SomasRecorderPtr = std::shared_ptr<SomasRecorder>; using StringRecorderPtr = std::shared_ptr<StringRecorder>;
} // namespace mindspore } // namespace mindspore
#endif // SOMAS_RECORDER_H #endif // MINDSPORE_CCSRC_DEBUG_RDR_STRING_RECORDER_H_

View File

@ -17,6 +17,7 @@
#include "runtime/device/memory_manager.h" #include "runtime/device/memory_manager.h"
#include <string> #include <string>
#include "backend/session/anf_runtime_algorithm.h" #include "backend/session/anf_runtime_algorithm.h"
#include "debug/rdr/running_data_recorder.h"
#include "utils/ms_context.h" #include "utils/ms_context.h"
using mindspore::memreuse::BestFitMemReuse; using mindspore::memreuse::BestFitMemReuse;
@ -69,6 +70,16 @@ void MemoryManager::MallocSomasDynamicMem(const session::KernelGraph *graph) {
auto context_ptr = MsContext::GetInstance(); auto context_ptr = MsContext::GetInstance();
MS_EXCEPTION_IF_NULL(context_ptr); MS_EXCEPTION_IF_NULL(context_ptr);
#ifdef ENABLE_DUMP_IR
SubModuleId module = SubModuleId::SM_OPTIMIZER;
std::string tag = "somas";
std::string filename = "somas_allocate_info_" + std::to_string(graph->graph_id()) + ".ir";
mindspore::RDR::RecordString(module, tag, somas_reuse_util_ptr_->SomasInfo(), filename);
filename = "somas_mem_info_" + std::to_string(graph->graph_id()) + ".ir";
mindspore::RDR::RecordString(module, tag, somas_reuse_util_ptr_->SomasMemory(), filename);
#endif
bool save_graphs = context_ptr->get_param<bool>(MS_CTX_SAVE_GRAPHS_FLAG); bool save_graphs = context_ptr->get_param<bool>(MS_CTX_SAVE_GRAPHS_FLAG);
auto save_graphs_path = context_ptr->get_param<std::string>(MS_CTX_SAVE_GRAPHS_PATH); auto save_graphs_path = context_ptr->get_param<std::string>(MS_CTX_SAVE_GRAPHS_PATH);
if (save_graphs_path.empty()) { if (save_graphs_path.empty()) {