!6375 Adapting new profiling performance data

Merge pull request !6375 from 张毅辉/zyh_profiling_adapt_aicpu_file
Commit f53047e8f5 by mindspore-ci-bot, 2020-09-17 11:57:24 +08:00, committed by Gitee
5 changed files with 32 additions and 16 deletions

View File

@@ -632,7 +632,7 @@ DebuggerCommand GetCommand(const EventReply &reply) {
       cmd = DebuggerCommand::kViewCMD;
       break;
     default:
-      MS_LOG(ERROR) << "Error: UnknownCMD";
+      MS_LOG(DEBUG) << "Debug: UnknownCMD";
       break;
   }
   return cmd;

View File

@@ -30,8 +30,8 @@ class DataPreProcessParser:
         output_filename(str): The output data path and name.
     """
-    _source_file_target = 'DATA_PREPROCESS.dev.AICPU.'
+    _source_file_target_old = 'DATA_PREPROCESS.dev.AICPU.'
+    _source_file_target = 'DATA_PREPROCESS.AICPU.'
     _dst_file_title = 'title:DATA_PREPROCESS AICPU'
     _dst_file_column_title = ['serial_number', 'node_type_name', 'total_time(ms)',
                               'dispatch_time(ms)', 'run_start', 'run_end']
@@ -53,8 +53,12 @@ class DataPreProcessParser:
         """Get log file name, which was created by ada service."""
         file_name = get_file_join_name(self._input_path, self._source_file_target)
         if not file_name:
-            data_path = os.path.join(self._input_path, "data")
-            file_name = get_file_join_name(data_path, self._source_file_target)
+            file_name = get_file_join_name(self._input_path, self._source_file_target_old)
+            if not file_name:
+                data_path = os.path.join(self._input_path, "data")
+                file_name = get_file_join_name(data_path, self._source_file_target)
+                if not file_name:
+                    file_name = get_file_join_name(data_path, self._source_file_target_old)
         return file_name

     def _get_kernel_result(self, number, node_list, thread_list):
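Note on the lookup order above: the parser now looks for the new AI CPU file-name prefix ('DATA_PREPROCESS.AICPU.') first and only falls back to the legacy prefix ('DATA_PREPROCESS.dev.AICPU.'), trying the profiling root before its 'data' subdirectory; the HWTS log parser below applies the same order. A minimal standalone sketch of that search order, using a hypothetical find_by_prefix helper as a stand-in for the project's get_file_join_name:

import os


def find_by_prefix(directory, prefix):
    """Return the first file in `directory` whose name starts with `prefix`, else None."""
    if not os.path.isdir(directory):
        return None
    for entry in sorted(os.listdir(directory)):
        if entry.startswith(prefix):
            return os.path.join(directory, entry)
    return None


def find_aicpu_source_file(profiling_dir):
    """Search order introduced here: new prefix, then old prefix,
    first in the profiling root and then in its 'data' subdirectory."""
    new_prefix = 'DATA_PREPROCESS.AICPU.'
    old_prefix = 'DATA_PREPROCESS.dev.AICPU.'
    file_name = find_by_prefix(profiling_dir, new_prefix)
    if not file_name:
        file_name = find_by_prefix(profiling_dir, old_prefix)
        if not file_name:
            data_path = os.path.join(profiling_dir, "data")
            file_name = find_by_prefix(data_path, new_prefix)
            if not file_name:
                file_name = find_by_prefix(data_path, old_prefix)
    return file_name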

View File

@@ -174,8 +174,8 @@ class FrameworkParser:
         output_path (str): The directory of the parsed file. Default: `./`.
     """
     _raw_data_dir = '/var/log/npu/profiling'
-    _regex_framework = r'Framework\.host\.(?P<data_type>.+)\.(?P<device_id>\d).+'
-    _regex_framework_in_data = r'Framework\.host\.(?P<data_type>.+)\.' \
+    _regex_framework = r'Framework\.(?P<data_type>.+)\.(?P<device_id>\d).+'
+    _regex_framework_in_data = r'Framework\.(?P<data_type>.+)\.' \
                                r'(?P<device_id>\d)\.(?P<profiling_id>[a-zA-Z0-9]+).+'
     _col_names = [
         'task_id', 'stream_id', 'block_dim', 'full_op_name', 'op_name',
@@ -338,6 +338,7 @@ class FrameworkParser:
                 raise ProfilerDeviceIdMismatchException()
             data_type = attrs.get('data_type')
+            data_type = data_type.replace("host.", "")
             if data_type.startswith('vm.'):
                 if self._backend_type and self._backend_type != 'vm':
                     raise ProfilerRawFileException('Backend type is inconsistent.')
@@ -395,6 +396,7 @@ class FrameworkParser:
                 raise ProfilerDeviceIdMismatchException()
             data_type = attrs.get('data_type')
+            data_type = data_type.replace("host.", "")
             if data_type.startswith('vm.'):
                 if self._backend_type and self._backend_type != 'vm':
                     raise ProfilerRawFileException('Backend type is inconsistent.')
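Taken together, the relaxed regular expressions and the added data_type.replace("host.", "") line let FrameworkParser accept both the old file names, which carry a 'host.' segment, and the new ones, which do not, while downstream checks such as data_type.startswith('vm.') keep working unchanged. A small illustration, assuming hypothetical file names for the two layouts:

import re

# Relaxed pattern from this change: the 'host.' segment is no longer required.
regex_framework = r'Framework\.(?P<data_type>.+)\.(?P<device_id>\d).+'

# Hypothetical examples of the old and new naming schemes (illustrative only).
file_names = [
    'Framework.host.vm.task_desc_info.0.slice_0',  # old layout, with 'host.'
    'Framework.vm.task_desc_info.0.slice_0',       # new layout, without it
]

for name in file_names:
    attrs = re.match(regex_framework, name).groupdict()
    # Mirror of the normalization added in the parser: drop a leading 'host.'.
    data_type = attrs['data_type'].replace("host.", "")
    print(name, '->', data_type, 'device', attrs['device_id'])
    assert data_type.startswith('vm.')  # backend-type detection still works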

View File

@@ -28,7 +28,8 @@ class HWTSLogParser:
         output_filename (str): The output data path and name. Such as: './output_format_data_hwts_0.txt'.
     """
-    _source_file_target = 'hwts.log.data.45.dev.profiler_default_tag'
+    _source_file_target_old = 'hwts.log.data.45.dev.profiler_default_tag'
+    _source_file_target = 'hwts.data'
     _dst_file_title = 'title:45 HWTS data'
     _dst_file_column_title = 'Type cnt Core_ID Block_ID Task_ID Cycle_counter Stream_ID'
@@ -42,11 +43,15 @@ class HWTSLogParser:
         file_name = get_file_join_name(self._input_path, self._source_file_target)
         if not file_name:
-            data_path = os.path.join(self._input_path, "data")
-            file_name = get_file_join_name(data_path, self._source_file_target)
+            file_name = get_file_join_name(self._input_path, self._source_file_target_old)
             if not file_name:
-                msg = "Fail to find hwts log file, under profiling directory"
-                raise RuntimeError(msg)
+                data_path = os.path.join(self._input_path, "data")
+                file_name = get_file_join_name(data_path, self._source_file_target)
+                if not file_name:
+                    file_name = get_file_join_name(data_path, self._source_file_target_old)
+        if not file_name:
+            msg = "Fail to find hwts log file, under profiling directory"
+            raise RuntimeError(msg)
         return file_name

View File

@@ -78,14 +78,19 @@ class MinddataParser:
         """
         col_names = ["node_name", "start_time", "end_time", "queue_size"]
         minddata_aicpu_source_path = get_file_join_name(
-            input_path=source_path, file_name='DATA_PREPROCESS.dev.AICPUMI')
+            input_path=source_path, file_name='DATA_PREPROCESS.AICPUMI')
         if not minddata_aicpu_source_path:
             minddata_aicpu_source_path = get_file_join_name(
-                input_path=os.path.join(source_path, "data"), file_name='DATA_PREPROCESS.dev.AICPUMI')
+                input_path=source_path, file_name='DATA_PREPROCESS.dev.AICPUMI')
             if not minddata_aicpu_source_path:
-                return
+                minddata_aicpu_source_path = get_file_join_name(
+                    input_path=os.path.join(source_path, "data"), file_name='DATA_PREPROCESS.AICPUMI')
+                if not minddata_aicpu_source_path:
+                    minddata_aicpu_source_path = get_file_join_name(
+                        input_path=os.path.join(source_path, "data"), file_name='DATA_PREPROCESS.dev.AICPUMI')
+                    if not minddata_aicpu_source_path:
+                        return
         minddata_aicpu_output_path = os.path.join(output_path, "minddata_aicpu_" + device_id + ".txt")
         minddata_aicpu_data = MinddataParser.parse_minddata_aicpu_data(minddata_aicpu_source_path)
         if minddata_aicpu_data:
             fwrite_format(minddata_aicpu_output_path, " ".join(col_names), is_start=True)
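The Minddata AI CPU change follows the same pattern: the new file name DATA_PREPROCESS.AICPUMI is preferred, the legacy DATA_PREPROCESS.dev.AICPUMI is the fallback, and both are tried in the profiling root before the 'data' subdirectory; if nothing is found, the parser returns without writing minddata_aicpu_<device_id>.txt. The four nested fallbacks are equivalent to walking a candidate list in order, as in this sketch (an exact-name check stands in for the prefix search that get_file_join_name performs):

import os


def find_minddata_aicpu_file(source_path):
    """Try each (directory, file name) candidate in the order used above."""
    data_path = os.path.join(source_path, "data")
    candidates = [
        (source_path, 'DATA_PREPROCESS.AICPUMI'),      # new name, profiling root
        (source_path, 'DATA_PREPROCESS.dev.AICPUMI'),  # old name, profiling root
        (data_path, 'DATA_PREPROCESS.AICPUMI'),        # new name, data subdirectory
        (data_path, 'DATA_PREPROCESS.dev.AICPUMI'),    # old name, data subdirectory
    ]
    for directory, name in candidates:
        path = os.path.join(directory, name)
        if os.path.isfile(path):
            return path
    return None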