Ignore renormalization if no changes have been made since the last renormalization.
Release all RDR data after checking is finished.
This commit is contained in: parent ddea9d52eb, commit 88e4dbeb04
@@ -984,6 +984,9 @@ void AscendSession::Execute(const std::shared_ptr<KernelGraph> &kernel_graph, bo
 #endif
     MS_LOG(EXCEPTION) << "run task error!";
   }
+#ifdef ENABLE_DUMP_IR
+  mindspore::RDR::ClearAll();
+#endif
   MS_LOG(INFO) << "Finish!";
 }
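For context, a minimal sketch (not part of the commit) of how the RDR free functions touched by this change fit together. The include path is inferred from the header guard shown later in this diff; SubModuleId::SM_SESSION, the tag, the data, and the filename are illustrative assumptions.

// Illustrative sketch only; not the actual session code.
#include <string>
#include "debug/rdr/running_data_recorder.h"

void RunTaskWithRdr() {
  // Record some debug data while the graph executes (module/tag/filename are made up).
  mindspore::RDR::RecordString(mindspore::SubModuleId::SM_SESSION, "task_info", "debug text", "task_info.txt");
  // On a failure path, export every registered recorder to disk.
  mindspore::RDR::TriggerAll();
  // Once RDR checking is finished, release everything the recorders still hold,
  // which is what the new ClearAll() call in AscendSession::Execute does.
  mindspore::RDR::ClearAll();
}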
@@ -57,7 +57,7 @@ class BaseRecorder {
       timestamp_ = ss.str();
     }
   }
-  ~BaseRecorder() {}
+  virtual ~BaseRecorder() {}
 
   std::string GetModule() const { return module_; }
   std::string GetTag() const { return tag_; }
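The only functional change in this hunk is making the destructor virtual. A self-contained sketch of why that matters when a recorder is destroyed through a base-class pointer; GraphRecorder and its cleanup are hypothetical stand-ins, not code from this commit.

#include <iostream>

struct BaseRecorder {
  virtual ~BaseRecorder() {}  // without "virtual", deleting through BaseRecorder* skips the derived destructor
};

struct GraphRecorder : BaseRecorder {  // hypothetical derived recorder
  ~GraphRecorder() override { std::cout << "flush buffered IR\n"; }  // cleanup that must not be skipped
};

int main() {
  BaseRecorder *recorder = new GraphRecorder();
  delete recorder;  // well-defined, and runs ~GraphRecorder(), only because ~BaseRecorder() is virtual
  return 0;
}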
@@ -52,4 +52,6 @@ void RecorderManager::TriggerAll() {
     MS_LOG(WARNING) << "There is no recorder to export.";
   }
 }
+
+void RecorderManager::ClearAll() { recorder_container_.clear(); }
 }  // namespace mindspore
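ClearAll() releases data simply by clearing the container of recorder smart pointers: dropping the last reference runs each recorder's destructor. A tiny sketch of that effect with stand-in types; the map layout and the key strings are assumptions, not taken from recorder_container_ itself.

#include <iostream>
#include <memory>
#include <string>
#include <unordered_map>

struct Recorder {
  ~Recorder() { std::cout << "recorder data released\n"; }
};

int main() {
  std::unordered_map<std::string, std::shared_ptr<Recorder>> recorder_container;
  recorder_container.emplace("GraphExecOrder", std::make_shared<Recorder>());
  recorder_container.emplace("TaskDebugInfo", std::make_shared<Recorder>());
  // clear() drops the last shared_ptr references, so every recorder's
  // destructor runs and its buffered data is freed.
  recorder_container.clear();
  return 0;
}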
@@ -36,6 +36,7 @@ class RecorderManager {
 
   bool RecordObject(const BaseRecorderPtr &recorder);
   void TriggerAll();
+  void ClearAll();
 
  private:
   RecorderManager() {}
@@ -87,6 +87,8 @@ bool RecordString(SubModuleId module, const std::string &tag, const std::string
 
 void TriggerAll() { mindspore::RecorderManager::Instance().TriggerAll(); }
 
+void ClearAll() { mindspore::RecorderManager::Instance().ClearAll(); }
+
 #else
 bool RecordAnfGraph(const SubModuleId module, const std::string &tag, const FuncGraphPtr &graph, bool full_name,
                     const std::string &file_type, int graph_id) {
@@ -34,6 +34,7 @@ bool RecordGraphExecOrder(const SubModuleId module, const std::string &tag,
 bool RecordString(SubModuleId module, const std::string &tag, const std::string &data,
                   const std::string &filename = "");
 void TriggerAll();
+void ClearAll();
 }  // namespace RDR
 }  // namespace mindspore
 #endif  // MINDSPORE_CCSRC_DEBUG_RDR_RUNNING_DATA_RECORDER_H_
@@ -146,15 +146,22 @@ class Optimizer : public std::enable_shared_from_this<Optimizer> {
     // Optimizer step counter;
     int64_t counter = 1;
     bool changes = true;
+    // If no changes since last renormalization, then no need to do the renormalization again.
+    // Set the initial value to true, so the renormalization can be executed once if it's the
+    // only pass.
+    bool changes_since_last_renorm = true;
 
     while (changes) {
       changes = false;
-      auto run_runc = [&counter, &func_graph, &changes, use_profile, this]() {
+      auto run_runc = [&counter, &func_graph, &changes, &changes_since_last_renorm, use_profile, this]() {
         for (size_t i = 0; i < passes_.size(); ++i) {
           const OptPass &opt = passes_[i];
           CurPass_ = {counter, pass_names_[i]};
-          auto opt_func = [&func_graph, &changes, &opt, this]() {
+          auto opt_func = [&func_graph, &changes, &opt, &changes_since_last_renorm, this]() {
             if (opt.is_renormalize()) {
+              if (!changes_since_last_renorm) {
+                return;
+              }
               auto resource_ptr = std::dynamic_pointer_cast<pipeline::Resource>(resource_);
               if (resource_ptr != nullptr) {
                 // StepParallel may replace the AbstractValue of the parameters of func_graph,
@@ -177,8 +184,10 @@ class Optimizer : public std::enable_shared_from_this<Optimizer> {
                 func_graph = pipeline::Renormalize(resource_ptr, func_graph, maybe_new_args_spec);
               }
             }
+            changes_since_last_renorm = false;
           } else if (opt(func_graph, shared_from_this())) {
             changes = true;
+            changes_since_last_renorm = true;
           }
         };
         use_profile ? (WITH(MsProfile::GetProfile()->Step(pass_names_[i])) opt_func) : opt_func();
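Taken together, the two Optimizer hunks implement a simple skip rule: renormalize only when some pass has changed the graph since the previous renormalization. A condensed, self-contained sketch of that control flow; Pass, RunPasses, and the driver in main are simplified stand-ins, not the actual Optimizer code.

#include <functional>
#include <vector>

// Stand-in for OptPass: either a renormalize step or a rewriting pass.
struct Pass {
  bool is_renormalize;
  std::function<bool()> run;  // returns true if the pass changed the graph
};

void RunPasses(const std::vector<Pass> &passes) {
  bool changes = true;
  // Start as true so a lone renormalize pass still executes once.
  bool changes_since_last_renorm = true;
  while (changes) {
    changes = false;
    for (const auto &pass : passes) {
      if (pass.is_renormalize) {
        if (!changes_since_last_renorm) {
          continue;  // nothing changed since the last renormalization, so skip it
        }
        // ... Renormalize(func_graph) would run here ...
        changes_since_last_renorm = false;
      } else if (pass.run()) {
        changes = true;
        changes_since_last_renorm = true;
      }
    }
  }
}

int main() {
  bool rewritten_once = false;
  std::vector<Pass> passes = {
    {false, [&]() { bool did = !rewritten_once; rewritten_once = true; return did; }},  // rewrites once
    {true, nullptr},  // renormalize pass; re-runs only while rewrites keep arriving
  };
  RunPasses(passes);
  return 0;
}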