forked from mindspore-Ecosystem/mindspore
!19427 Fix offline dbg issue: multiple read_tensor output if dumping twice
Merge pull request !19427 from TinaMengtingZhang/multi_output_when_dump_twice
Commit 7dffa5096c
@@ -686,6 +686,16 @@ void DebugServices::SetPrefixToCheck(std::string *prefix_dump_file_name, std::st
     *prefix_dump_file_name = dump_style_name_part;
 }
 
+std::string GetNewestFilePath(std::vector<std::string> file_list) {
+  // get file with the newest timestamp from the list.
+  std::string newest_file;
+  if (file_list.empty()) {
+    return newest_file;
+  }
+  std::sort(file_list.begin(), file_list.end());
+  return file_list.back();
+}
+
 void DebugServices::ReadDumpedTensor(std::vector<std::string> backend_name, std::vector<size_t> slot,
                                      std::vector<unsigned int> device_id, std::vector<unsigned int> iteration,
                                      std::vector<unsigned int> root_graph_id, const std::vector<bool> &is_output,
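
Note: the new helper returns the lexicographically largest path, which is the newest dump only under the assumption that the timestamp embedded in the dump file name has a fixed width, so that string order matches chronological order. A minimal standalone sketch of that assumption (the file names below are made up for illustration, not taken from the patch):

// Standalone sketch: GetNewestFilePath picks the lexicographically largest
// path; this equals the newest dump only if the timestamp field in the file
// name is fixed-width (assumption, not verified by the patch itself).
#include <algorithm>
#include <iostream>
#include <string>
#include <vector>

std::string GetNewestFilePath(std::vector<std::string> file_list) {
  std::string newest_file;
  if (file_list.empty()) {
    return newest_file;  // empty input yields an empty path
  }
  std::sort(file_list.begin(), file_list.end());
  return file_list.back();
}

int main() {
  // Hypothetical names for one tensor dumped twice in the same iteration.
  std::vector<std::string> matched_paths = {"Conv2D.op1.0.0.1626380000000100.output.0.npy",
                                            "Conv2D.op1.0.0.1626380000000250.output.0.npy"};
  std::cout << GetNewestFilePath(matched_paths) << std::endl;  // prints the later dump
  return 0;
}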
@@ -713,9 +723,10 @@ void DebugServices::ReadDumpedTensor(std::vector<std::string> backend_name, std:
     if (is_sync_mode) {
       DIR *d;
       d = opendir(specific_dump_dir.c_str());
+      bool found_file = false;
+      std::vector<std::string> matched_paths;
       if (d != nullptr) {
         struct dirent *dir = nullptr;
-        bool found_file = false;
         while ((dir = readdir(d)) != NULL) {
           if (dir->d_type == DT_REG) {
             std::string file_name = dir->d_name;
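
Note: this hunk only hoists found_file out of the directory check and adds the matched_paths collector; the POSIX directory walk itself is unchanged. For reference, a self-contained sketch of that opendir/readdir/closedir pattern (the directory path is illustrative, not from the patch):

// Sketch of the directory-scan pattern used above: collect every regular
// file in a directory into a vector and let the caller filter the names.
#include <dirent.h>
#include <iostream>
#include <string>
#include <vector>

std::vector<std::string> ListRegularFiles(const std::string &dir_path) {
  std::vector<std::string> files;
  DIR *d = opendir(dir_path.c_str());
  if (d == nullptr) {
    return files;  // mirrors the "directory does not exist" branch
  }
  struct dirent *dir = nullptr;
  while ((dir = readdir(d)) != NULL) {
    if (dir->d_type == DT_REG) {
      files.push_back(dir_path + "/" + dir->d_name);
    }
  }
  closedir(d);
  return files;
}

int main() {
  for (const auto &f : ListRegularFiles("/tmp")) {  // illustrative path
    std::cout << f << std::endl;
  }
  return 0;
}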
@@ -729,22 +740,25 @@ void DebugServices::ReadDumpedTensor(std::vector<std::string> backend_name, std:
               continue;
             }
 
-            shape.clear();
             std::string full_path = specific_dump_dir + "/" + file_name;
-            ReadTensorFromNpy(full_path, &type_name, &data_size, &shape, &buffer);
-            AddToTensorData(backend_name[i], slot[i], iteration[i], device_id[i], root_graph_id[i], is_output[i],
-                            data_size, type_name, shape, buffer, result_list);
+            matched_paths.push_back(full_path);
             found_file = true;
           }
         }
-        if (!found_file) {
-          AddToTensorData(backend_name[i], slot[i], iteration[i], device_id[i], root_graph_id[i], is_output[i], 0,
-                          type_name, shape, buffer, result_list);
-        }
       } else {
+        MS_LOG(INFO) << "Directory " << specific_dump_dir << " does not exist!";
+      }
+
+      if (found_file) {
+        shape.clear();
+        std::string result_path = GetNewestFilePath(matched_paths);
+        ReadTensorFromNpy(result_path, &type_name, &data_size, &shape, &buffer);
+        AddToTensorData(backend_name[i], slot[i], iteration[i], device_id[i], root_graph_id[i], is_output[i], data_size,
+                        type_name, shape, buffer, result_list);
       } else {
         AddToTensorData(backend_name[i], slot[i], iteration[i], device_id[i], root_graph_id[i], is_output[i], 0,
                         type_name, shape, buffer, result_list);
-        MS_LOG(INFO) << "directory does not exist!";
+        MS_LOG(INFO) << "Target tensor has not been found.";
       }
       closedir(d);
     } else {
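
Note: taken together, the change switches the sync-mode path from "add one result per matching file" to "collect matches, then read only the newest", which is what removes the duplicate read_tensor output when a tensor is dumped twice. A condensed sketch of that flow under the same assumptions as above (MatchesRequestedTensor and DecodeNpy are hypothetical stand-ins for the real prefix check and ReadTensorFromNpy):

// Condensed sketch (not the actual DebugServices code) of the new sync-mode
// flow: gather every matching dump file, then decode only the newest one.
#include <algorithm>
#include <iostream>
#include <string>
#include <vector>

// Hypothetical stand-ins for the prefix check and ReadTensorFromNpy.
bool MatchesRequestedTensor(const std::string &file_name) {
  return file_name.rfind("Conv2D.op1.", 0) == 0;  // illustrative prefix match
}
void DecodeNpy(const std::string &path) { std::cout << "reading " << path << std::endl; }

int main() {
  // Hypothetical directory listing with the same tensor dumped twice.
  std::vector<std::string> files = {"Conv2D.op1.0.0.1626380000000100.output.0.npy",
                                    "ReLU.op2.0.0.1626380000000150.output.0.npy",
                                    "Conv2D.op1.0.0.1626380000000250.output.0.npy"};
  std::vector<std::string> matched_paths;
  for (const auto &file_name : files) {
    if (MatchesRequestedTensor(file_name)) {
      matched_paths.push_back(file_name);
    }
  }
  if (matched_paths.empty()) {
    std::cout << "Target tensor has not been found." << std::endl;  // empty-result branch
  } else {
    // Equivalent to GetNewestFilePath(matched_paths) from the patch.
    DecodeNpy(*std::max_element(matched_paths.begin(), matched_paths.end()));
  }
  return 0;
}

Using std::max_element here only avoids restating the helper; the patch's sort-then-back approach picks the same newest name, and the matched list is small enough that the extra sort cost is negligible.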