forked from mindspore-Ecosystem/mindspore
!16318 Revert the change of GenerateDumpPath
From: @tina_mengting_zhang
Reviewed-by: @john_tzanakakis, @nsyca
Signed-off-by: @john_tzanakakis
commit 417b719d1d
@@ -29,7 +29,7 @@ void CPUE2eDump::DumpCNodeData(const CNodePtr &node, uint32_t graph_id) {
   MS_LOG(DEBUG) << "E2e dump CNode data start: " << kernel_name << ", current iteration is "
                 << dump_json_parser.cur_dump_iter();
-  std::string dump_path = GenerateDumpPath(graph_id);
+  std::string dump_path = GenerateDumpPath();
   if (dump_json_parser.InputNeedDump()) {
     DumpCNodeInputs(node, dump_path);
   }
@@ -134,7 +134,7 @@ void CPUE2eDump::DumpParametersAndConst(const session::KernelGraph *graph, uint3
   MS_LOG(INFO) << "Start e2e dump parameters and Const values";
   std::map<std::string, size_t> const_map;
   GetConstantId(graph, &const_map);
-  const std::string &dump_path = GenerateDumpPath(graph_id);
+  const std::string &dump_path = GenerateDumpPath();
 
   // dump parameters
   const auto &parameters = graph->inputs();
@@ -34,7 +34,7 @@ uint32_t ConvertPhysicalDeviceId(uint32_t device_id) {
   return kernel_runtime->device_id();
 }
 
-std::string GenerateDumpPath(uint32_t graph_id, const uint32_t *device_id) {
+std::string GenerateDumpPath(const uint32_t *device_id) {
   auto &dump_json_parser = DumpJsonParser::GetInstance();
   std::string net_name = dump_json_parser.net_name();
   std::string iterator = std::to_string(dump_json_parser.cur_dump_iter());
@@ -42,9 +42,12 @@ std::string GenerateDumpPath(uint32_t graph_id, const uint32_t *device_id) {
   if (dump_path.back() != '/') {
     dump_path += "/";
   }
-  uint32_t physical_device = device_id == nullptr ? 0 : ConvertPhysicalDeviceId(*device_id);
-  dump_path += ("rank_" + std::to_string(physical_device) + "/" + net_name + "/graph_" + std::to_string(graph_id) +
-                "/iteration_" + iterator);
+  if (device_id == nullptr) {
+    dump_path += (net_name + "/iteration_" + iterator);
+  } else {
+    auto physical_device = ConvertPhysicalDeviceId(*device_id);
+    dump_path += (net_name + "/device_" + std::to_string(physical_device) + "/iteration_" + iterator);
+  }
   return dump_path;
 }
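For reference, a minimal self-contained sketch (not part of this patch) of the path layout the revert restores: with no device id the dump lands in <net_name>/iteration_<n> (the CPU case), otherwise a device_<physical_id> segment is inserted. The dump root, net name, and iteration below are hard-coded stand-ins for the values DumpJsonParser provides, and ConvertPhysicalDeviceIdSketch is a hypothetical stand-in for the real runtime lookup.

// sketch_generate_dump_path.cc -- illustration only, not MindSpore source.
#include <cstdint>
#include <iostream>
#include <string>

// Stand-in for ConvertPhysicalDeviceId, which asks the kernel runtime for the
// physical device id; here it simply echoes the logical id.
uint32_t ConvertPhysicalDeviceIdSketch(uint32_t device_id) { return device_id; }

std::string GenerateDumpPathSketch(const uint32_t *device_id = nullptr) {
  std::string dump_path = "/tmp/dump/";  // stand-in for DumpJsonParser path
  std::string net_name = "Net";          // stand-in for net_name()
  std::string iterator = "1";            // stand-in for cur_dump_iter()
  if (dump_path.back() != '/') {
    dump_path += "/";
  }
  if (device_id == nullptr) {
    dump_path += (net_name + "/iteration_" + iterator);  // CPU target
  } else {
    auto physical_device = ConvertPhysicalDeviceIdSketch(*device_id);
    dump_path += (net_name + "/device_" + std::to_string(physical_device) +
                  "/iteration_" + iterator);             // Ascend/GPU target
  }
  return dump_path;
}

int main() {
  uint32_t device_id = 3;
  std::cout << GenerateDumpPathSketch() << "\n";            // /tmp/dump/Net/iteration_1
  std::cout << GenerateDumpPathSketch(&device_id) << "\n";  // /tmp/dump/Net/device_3/iteration_1
  return 0;
}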
@@ -27,7 +27,7 @@ namespace mindspore {
 static const size_t PARAMETER_OUTPUT_INDEX = 0;
 static const size_t VALUE_NODE_OUTPUT_INDEX = 0;
 
-std::string GenerateDumpPath(uint32_t graph_id, const uint32_t *device_id = nullptr);
+std::string GenerateDumpPath(const uint32_t *device_id = nullptr);
 
 void GetFileKernelName(NotNull<std::string *> kernel_name);
@@ -235,7 +235,7 @@ bool E2eDump::DumpData(const session::KernelGraph *graph, uint32_t device_id, co
   }
   MS_LOG(INFO) << "Start e2e dump. Current iteration is " << dump_json_parser.cur_dump_iter();
   MS_LOG(INFO) << "Current graph id is " << graph_id;
-  std::string dump_path = GenerateDumpPath(graph_id, &device_id);
+  std::string dump_path = GenerateDumpPath(&device_id);
 
   DumpInput(graph, dump_path, debugger);
   DumpOutput(graph, dump_path, debugger);
@@ -82,14 +82,16 @@ def run_e2e_dump_bin():
     change_current_dump_json('e2e_dump_bin.json', dump_path)
     os.environ['MINDSPORE_DUMP_CONFIG'] = pwd + '/e2e_dump_bin.json'
     device_id = context.get_context("device_id")
-    dump_file_path = dump_path + '/rank_{}/Net/graph_0/iteration_1/'.format(device_id)
+    dump_file_path = dump_path
     if os.path.isdir(dump_path):
         shutil.rmtree(dump_path)
     add = Net()
     add(Tensor(x), Tensor(y))
+    if context.get_context("device_target") == "Ascend":
+        dump_file_path += '/Net/device_{}/iteration_1/'.format(device_id)
+        output_name = "Default--Add-op1_output_0_shape_2_3_Float32_DefaultFormat.bin"
+    else:
+        dump_file_path += '/Net/iteration_1/'
+        output_name = "Default--Add-op3_output_0_shape_2_3_Float32_DefaultFormat.bin"
     output_path = dump_file_path + output_name
     real_path = os.path.realpath(output_path)
@@ -107,14 +109,16 @@ def run_e2e_dump_npy():
     change_current_dump_json('e2e_dump_npy.json', dump_path)
     os.environ['MINDSPORE_DUMP_CONFIG'] = pwd + '/e2e_dump_npy.json'
     device_id = context.get_context("device_id")
-    dump_file_path = dump_path + '/rank_{}/Net/graph_0/iteration_1/'.format(device_id)
+    dump_file_path = dump_path
     if os.path.isdir(dump_path):
         shutil.rmtree(dump_path)
     add = Net()
     add(Tensor(x), Tensor(y))
+    if context.get_context("device_target") == "Ascend":
+        dump_file_path += '/Net/device_{}/iteration_1/'.format(device_id)
+        output_name = "Default--Add-op1_output_0_shape_2_3_Float32_DefaultFormat.npy"
+    else:
+        dump_file_path += '/Net/iteration_1/'
+        output_name = "Default--Add-op3_output_0_shape_2_3_Float32_DefaultFormat.npy"
     output_path = dump_file_path + output_name
     real_path = os.path.realpath(output_path)