Skip renormalization if no changes have been made since the last renormalization.

Release all RDR data once RDR checking has finished.
Zhang Qinghua 2021-02-23 09:39:21 +08:00
parent ddea9d52eb
commit 88e4dbeb04
7 changed files with 21 additions and 3 deletions

View File

@@ -984,6 +984,9 @@ void AscendSession::Execute(const std::shared_ptr<KernelGraph> &kernel_graph, bo
#endif
MS_LOG(EXCEPTION) << "run task error!";
}
#ifdef ENABLE_DUMP_IR
mindspore::RDR::ClearAll();
#endif
MS_LOG(INFO) << "Finish!";
}

View File

@@ -57,7 +57,7 @@ class BaseRecorder {
timestamp_ = ss.str();
}
}
~BaseRecorder() {}
virtual ~BaseRecorder() {}
std::string GetModule() const { return module_; }
std::string GetTag() const { return tag_; }
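The destructor is made virtual because recorders are held and released through the base-class handle (BaseRecorderPtr) when the manager drops them from its container, and deleting a derived object through a base-class pointer is only well defined when the base destructor is virtual. A minimal sketch of the idea, using simplified stand-in types rather than the actual MindSpore classes:

#include <memory>
#include <string>
#include <vector>

// Simplified stand-ins for illustration; the real recorder classes live under debug/rdr/.
struct BaseRecorder {
  virtual ~BaseRecorder() = default;  // without "virtual", deleting a derived recorder
                                      // through a BaseRecorder* is undefined behavior
};

struct GraphRecorder : BaseRecorder {      // hypothetical derived recorder, not a MindSpore class
  std::vector<std::string> cached_dumps;   // data that must be released on destruction
};

int main() {
  std::vector<std::unique_ptr<BaseRecorder>> recorders;  // held through the base class
  recorders.push_back(std::make_unique<GraphRecorder>());
  recorders.clear();  // safely runs ~GraphRecorder() because ~BaseRecorder() is virtual
  return 0;
}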

View File

@@ -52,4 +52,6 @@ void RecorderManager::TriggerAll() {
MS_LOG(WARNING) << "There is no recorder to export.";
}
}
void RecorderManager::ClearAll() { recorder_container_.clear(); }
} // namespace mindspore

View File

@@ -36,6 +36,7 @@ class RecorderManager {
bool RecordObject(const BaseRecorderPtr &recorder);
void TriggerAll();
void ClearAll();
private:
RecorderManager() {}

View File

@@ -87,6 +87,8 @@ bool RecordString(SubModuleId module, const std::string &tag, const std::string
void TriggerAll() { mindspore::RecorderManager::Instance().TriggerAll(); }
void ClearAll() { mindspore::RecorderManager::Instance().ClearAll(); }
#else
bool RecordAnfGraph(const SubModuleId module, const std::string &tag, const FuncGraphPtr &graph, bool full_name,
const std::string &file_type, int graph_id) {

View File

@@ -34,6 +34,7 @@ bool RecordGraphExecOrder(const SubModuleId module, const std::string &tag,
bool RecordString(SubModuleId module, const std::string &tag, const std::string &data,
const std::string &filename = "");
void TriggerAll();
void ClearAll();
} // namespace RDR
} // namespace mindspore
#endif // MINDSPORE_CCSRC_DEBUG_RDR_RUNNING_DATA_RECORDER_H_
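Taken together, the RDR changes wire up a ClearAll() chain: the free function mindspore::RDR::ClearAll() forwards to the RecorderManager singleton, whose ClearAll() clears recorder_container_ and thereby releases every recorder's data once checking is done. A rough, self-contained sketch of that pattern follows; the container's key type and the stand-in classes are assumptions, not the real MindSpore definitions:

#include <map>
#include <memory>
#include <string>

// Simplified stand-ins for the pattern the diff wires up.
struct BaseRecorder {
  virtual ~BaseRecorder() = default;
};
using BaseRecorderPtr = std::shared_ptr<BaseRecorder>;

class RecorderManager {
 public:
  static RecorderManager &Instance() {
    static RecorderManager instance;  // singleton, as in the real manager
    return instance;
  }
  void ClearAll() { recorder_container_.clear(); }  // drops every recorder, releasing its data

 private:
  RecorderManager() = default;
  std::map<std::string, BaseRecorderPtr> recorder_container_;  // keyed container is an assumption
};

namespace RDR {
// Thin free-function wrapper, matching the style of TriggerAll().
inline void ClearAll() { RecorderManager::Instance().ClearAll(); }
}  // namespace RDR

int main() {
  // After graph execution (and any triggered RDR dump) has finished,
  // the session releases all recorded data in one call:
  RDR::ClearAll();
  return 0;
}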

View File

@@ -146,15 +146,22 @@ class Optimizer : public std::enable_shared_from_this<Optimizer> {
// Optimizer step counter;
int64_t counter = 1;
bool changes = true;
// If nothing has changed since the last renormalization, there is no need to renormalize again.
// Initialize the flag to true so that renormalization still runs once when it is the only pass.
bool changes_since_last_renorm = true;
while (changes) {
changes = false;
auto run_runc = [&counter, &func_graph, &changes, use_profile, this]() {
auto run_runc = [&counter, &func_graph, &changes, &changes_since_last_renorm, use_profile, this]() {
for (size_t i = 0; i < passes_.size(); ++i) {
const OptPass &opt = passes_[i];
CurPass_ = {counter, pass_names_[i]};
auto opt_func = [&func_graph, &changes, &opt, this]() {
auto opt_func = [&func_graph, &changes, &opt, &changes_since_last_renorm, this]() {
if (opt.is_renormalize()) {
if (!changes_since_last_renorm) {
return;
}
auto resource_ptr = std::dynamic_pointer_cast<pipeline::Resource>(resource_);
if (resource_ptr != nullptr) {
// StepParallel may replace the AbstractValue of the parameters of func_graph,
@@ -177,8 +184,10 @@ class Optimizer : public std::enable_shared_from_this<Optimizer> {
func_graph = pipeline::Renormalize(resource_ptr, func_graph, maybe_new_args_spec);
}
}
changes_since_last_renorm = false;
} else if (opt(func_graph, shared_from_this())) {
changes = true;
changes_since_last_renorm = true;
}
};
use_profile ? (WITH(MsProfile::GetProfile()->Step(pass_names_[i])) opt_func) : opt_func();
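The optimizer change follows a common skip-if-unchanged pattern: a flag records whether any pass has modified the graph since the last renormalization, the renormalize step is skipped while the flag is false, and the flag is cleared right after each renormalization. A compact sketch of that pattern using hypothetical Graph/Pass stand-ins (not the MindSpore optimizer API):

#include <functional>
#include <string>
#include <vector>

// Illustrative stand-ins only; the real optimizer works on FuncGraphPtr and OptPass.
struct Graph {
  int node_count = 10;
};

struct Pass {
  std::string name;
  bool is_renormalize = false;
  std::function<bool(Graph &)> run;  // returns true if the pass changed the graph
};

void RunOptimizer(Graph &graph, const std::vector<Pass> &passes) {
  bool changes = true;
  // Start as true so renormalization still runs once even when it is the only pass.
  bool changes_since_last_renorm = true;
  while (changes) {
    changes = false;
    for (const auto &pass : passes) {
      if (pass.is_renormalize) {
        if (!changes_since_last_renorm) {
          continue;  // nothing changed since the last renormalization, so skip the expensive step
        }
        pass.run(graph);  // renormalize the whole graph
        changes_since_last_renorm = false;
      } else if (pass.run(graph)) {
        changes = true;
        changes_since_last_renorm = true;  // the next renormalization is needed again
      }
    }
  }
}

int main() {
  Graph g;
  std::vector<Pass> passes = {
      {"fold", false, [](Graph &graph) { return graph.node_count > 5 ? (--graph.node_count, true) : false; }},
      {"renormalize", true, [](Graph &) { return true; }},
  };
  RunOptimizer(g, passes);
  return 0;
}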