[profiler] Fix profiling_parallel.pb.h compile problems

This commit is contained in:
臧庆香 2022-12-05 18:50:21 +08:00
parent 375750d4b5
commit 9e8a364e7d
5 changed files with 24 additions and 14 deletions

View File

@@ -23,6 +23,7 @@
 #include "mindspore/core/utils/file_utils.h"
 #include "google/protobuf/util/json_util.h"
 #include "nlohmann/json.hpp"
+#include "proto/profiling_parallel.pb.h"
 #ifdef WITH_BACKEND
 #include "ps/ps_context.h"
@@ -75,14 +76,14 @@ bool ParallelStrategy::StringToInt(std::string *str, int32_t *value) {
   return true;
 }
 
-irpb::ProfilingParallel ParallelStrategy::GetProfilingParallel(const FuncGraphPtr &func_graph) {
-  irpb::ProfilingParallel profiling_parallel;
+std::shared_ptr<irpb::ProfilingParallel> ParallelStrategy::GetProfilingParallel() {
+  std::shared_ptr<irpb::ProfilingParallel> profiling_parallel = std::make_shared<irpb::ProfilingParallel>();
   // set parallel model
   auto parallel_context = parallel::ParallelContext::GetInstance();
   MS_EXCEPTION_IF_NULL(parallel_context);
   std::string parallel_mode = parallel_context->parallel_mode();
-  irpb::Config *config = profiling_parallel.mutable_config();
+  irpb::Config *config = profiling_parallel->mutable_config();
   MS_EXCEPTION_IF_NULL(config);
   config->set_parallel_type(parallel_mode);
@@ -138,7 +139,7 @@ void ParallelStrategy::DumpProfileParallelStrategy(const FuncGraphPtr &func_graph) {
   MS_LOG(INFO) << "Start to DumpProfileParallelStrategy.";
-  cache_profiling_parallel_pb_ = GetProfilingParallel(func_graph);
+  cache_profiling_parallel_pb_ = GetProfilingParallel();
   graph_proto_str_ = GetFuncGraphProtoJsonString(func_graph);
   auto ascend_profiler = Profiler::GetInstance(kAscendDevice);
@@ -165,7 +166,7 @@ void ParallelStrategy::SaveParallelStrategyToFile() {
     rank_id = "0";
   }
   std::string parallel_str;
-  (void)google::protobuf::util::MessageToJsonString(cache_profiling_parallel_pb_, &parallel_str);
+  (void)google::protobuf::util::MessageToJsonString(*cache_profiling_parallel_pb_, &parallel_str);
   std::string parallel_file = std::string("parallel_strategy_") + std::string(rank_id) + std::string(".json");
   std::string parallel_path = dir + "/" + parallel_file;
   MS_LOG(INFO) << "Start to write parallel strategy string, file path is " << parallel_path;
@@ -189,17 +190,16 @@ void ParallelStrategy::SaveParallelStrategyToFile() {
 
 std::string ParallelStrategy::GetParallelStrategyForReport() {
   bool parallel_data_save_status = has_got_parallel_strategy_data_;
   std::string report_data;
-  irpb::ProfilingParallel profiling_parallel;
+  std::shared_ptr<irpb::ProfilingParallel> profiling_parallel;
   if (has_got_parallel_strategy_data_) {
     profiling_parallel = cache_profiling_parallel_pb_;
   } else {
-    FuncGraphPtr func_graph = nullptr;
-    profiling_parallel = GetProfilingParallel(func_graph);
+    profiling_parallel = GetProfilingParallel();
   }
   auto parallel_context = parallel::ParallelContext::GetInstance();
   MS_EXCEPTION_IF_NULL(parallel_context);
-  (void)google::protobuf::util::MessageToJsonString(profiling_parallel, &report_data);
+  (void)google::protobuf::util::MessageToJsonString(*profiling_parallel, &report_data);
   try {
     nlohmann::json report_content = nlohmann::json::parse(report_data);
     report_content["config"]["ai_framework_type"] = "MindSpore";

View File

@@ -22,9 +22,11 @@
 #include "ir/func_graph.h"
 #include "base/base.h"
 #include "include/backend/visible.h"
-#include "proto/profiling_parallel.pb.h"
 
 namespace mindspore {
+namespace irpb {
+class ProfilingParallel;
+}
 namespace profiler {
 namespace ascend {
 class ParallelStrategy {
@@ -37,14 +39,14 @@ class ParallelStrategy {
   std::string GetParallelStrategyForReport();
 
  private:
-  irpb::ProfilingParallel GetProfilingParallel(const FuncGraphPtr &func_graph);
+  std::shared_ptr<irpb::ProfilingParallel> GetProfilingParallel();
   bool IsProfilingParallelStrategyEnabled();
   bool StringToInt(std::string *str, int32_t *value);
   static std::shared_ptr<ParallelStrategy> parallel_strategy_inst_;
   bool has_save_parallel_strategy_ = false;
   bool has_got_parallel_strategy_data_ = false;
-  irpb::ProfilingParallel cache_profiling_parallel_pb_;
+  std::shared_ptr<irpb::ProfilingParallel> cache_profiling_parallel_pb_;
   std::string graph_proto_str_;
 };
 }  // namespace ascend
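This header change is the actual compile fix: the widely-included header no longer drags in the generated profiling_parallel.pb.h; a forward declaration plus shared_ptr members and return types suffice, and only the .cc includes the generated file. A sketch of the idiom under illustrative names (StrategyHolder is not from the commit):

```cpp
// strategy_holder.h -- illustrative file and class names, not from this commit.
#pragma once
#include <memory>

namespace irpb {
class ProfilingParallel;  // forward declaration replaces #include "proto/profiling_parallel.pb.h"
}  // namespace irpb

class StrategyHolder {
 public:
  // Holding or returning the message through std::shared_ptr only needs the
  // incomplete type; a by-value member or return type (irpb::ProfilingParallel)
  // would force the complete definition back into this header.
  std::shared_ptr<irpb::ProfilingParallel> GetProfilingParallel();

 private:
  std::shared_ptr<irpb::ProfilingParallel> cache_;
};

// strategy_holder.cc -- only the implementation file includes the generated
// header, so regenerating the proto no longer recompiles every includer:
//   #include "proto/profiling_parallel.pb.h"
//   std::shared_ptr<irpb::ProfilingParallel> StrategyHolder::GetProfilingParallel() {
//     return std::make_shared<irpb::ProfilingParallel>();
//   }
```

Note that std::shared_ptr, unlike std::unique_ptr with its default deleter, stays usable with an incomplete type in class members, which is why the commit can swap the by-value field for a shared_ptr without further header changes.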

View File

@@ -32,7 +32,7 @@ class HWTSLogParser:
     GRAPH_MODE_MAX_TASKID = 65000
     _source_file_target_old = 'hwts.log.data.45.dev.profiler_default_tag'
-    _source_file_target = 'hwts.data'
+    _source_file_target = 'hwts.data.'
     _dst_file_title = 'title:45 HWTS data'
     _dst_file_column_title = 'Type cnt Core_ID Block_ID Task_ID Cycle_counter Stream_ID'
@@ -66,7 +66,7 @@ class HWTSLogParser:
                 if not line.strip():
                     continue
                 if len(line) < 64:
-                    logger.warning("Length of hwts data is less than 64")
+                    logger.error("Length of hwts data is less than 64, it is %s", len(line))
                     continue
                 byte_first_four = struct.unpack('BBHHH', line[0:8])
                 byte_first = bin(byte_first_four[0]).replace('0b', '').zfill(8)
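For context, this parser consumes fixed 64-byte records whose first 8 bytes unpack as 'BBHHH': two one-byte fields followed by three native-order 16-bit fields. A sketch of that header layout (in C++ for illustration only — the parser itself is Python, and the field names are guesses mapped from the column title above, not the parser's identifiers):

```cpp
#include <cstdint>
#include <cstring>
#include <iostream>
#include <vector>

// Hypothetical mirror of struct.unpack('BBHHH', line[0:8]).
struct HwtsRecordHeader {
  uint8_t first_byte;  // 'B' -- bit-inspected via bin(...).zfill(8) in the parser
  uint8_t cnt;         // 'B'
  uint16_t core_id;    // 'H'
  uint16_t block_id;   // 'H'
  uint16_t task_id;    // 'H'
};

bool ParseHeader(const std::vector<uint8_t> &record, HwtsRecordHeader *out) {
  // The same guard the commit tightens: a short record is malformed and is
  // skipped, now logged at error severity with the offending length.
  if (record.size() < 64) {
    std::cerr << "Length of hwts data is less than 64, it is " << record.size() << '\n';
    return false;
  }
  std::memcpy(&out->first_byte, record.data() + 0, 1);
  std::memcpy(&out->cnt, record.data() + 1, 1);
  std::memcpy(&out->core_id, record.data() + 2, 2);
  std::memcpy(&out->block_id, record.data() + 4, 2);
  std::memcpy(&out->task_id, record.data() + 6, 2);
  return true;
}

int main() {
  std::vector<uint8_t> record(64, 0);  // one well-formed (all-zero) 64-byte record
  HwtsRecordHeader header{};
  if (ParseHeader(record, &header)) {
    std::cout << "task_id=" << header.task_id << '\n';
  }
  return 0;
}
```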

View File

@@ -18,6 +18,7 @@ import json
 import os
 from decimal import Decimal
 from enum import Enum
+import sys
 
 from mindspore import log as logger
 from mindspore.profiler.common.exceptions.exceptions import ProfilerRawFileException
@@ -25,6 +26,9 @@ from mindspore.profiler.common.util import query_latest_trace_time_file, to_int,
 from mindspore.profiler.common.validator.validate_path import validate_and_normalize_path
 
+csv.field_size_limit(sys.maxsize)
+
+
 class Integrator:
     """
     The integrator for integrating parsed profiling files.

View File

@@ -706,6 +706,8 @@ class Profiler:
         if GlobalComm.INITED:
             self._rank_size = get_group_size()
+        else:
+            self._rank_size = os.getenv('RANK_SIZE')
 
         if self._has_started:
             self.stop()
@@ -895,6 +897,8 @@ class Profiler:
         if GlobalComm.INITED:
             self._rank_size = get_group_size()
+        else:
+            self._rank_size = os.getenv('RANK_SIZE')
 
         if self._has_started:
             self.stop()