!23295 add dump and profiling warning log when task is not sink
Merge pull request !23295 from baihuawei/fixlog
commit d7388b40ab
@@ -60,10 +60,16 @@ std::string GetIfstreamString(const std::ifstream &ifstream) {
 }
 
 bool DumpJsonParser::IsDumpEnabled() {
+  auto single_op = common::GetEnv(kGraphOpRun);
   auto config_path = common::GetEnv(kMindsporeDumpConfig);
   if (config_path.empty()) {
     return false;
   }
+  if (!single_op.empty() && single_op == "1") {
+    MS_LOG(WARNING) << "Dump is not supported when task is not sink. Please set env GRAPH_OP_RUN to 0 to enable task "
+                       "sink, so that the data can be dumped.";
+    return false;
+  }
   MS_LOG(INFO) << "Dump config path is " << config_path;
 
   auto context = MsContext::GetInstance();
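The new guard reads GRAPH_OP_RUN up front and, after the existing config-path check, refuses to enable dump when the value is "1" (single-op, non-sink execution), logging a warning instead of silently producing nothing. A minimal Python sketch of the same decision logic, assuming kMindsporeDumpConfig expands to "MINDSPORE_DUMP_CONFIG" (the function and logger names here are illustrative, not the MindSpore API):

import logging
import os

logger = logging.getLogger("dump")

def is_dump_enabled():
    """Illustrative restatement of DumpJsonParser::IsDumpEnabled above."""
    single_op = os.getenv("GRAPH_OP_RUN", "")
    config_path = os.getenv("MINDSPORE_DUMP_CONFIG", "")  # assumed value of kMindsporeDumpConfig
    if not config_path:
        return False
    if single_op == "1":
        logger.warning("Dump is not supported when task is not sink. "
                       "Set GRAPH_OP_RUN=0 to enable task sink so that data can be dumped.")
        return False
    logger.info("Dump config path is %s", config_path)
    return True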
@@ -1265,7 +1265,7 @@ void InitHccl() {
   uint32_t device_id = ms_context->get_param<uint32_t>(MS_CTX_DEVICE_ID);
 #if ENABLE_D
   bool task_sink = true;
-  auto single_op = std::getenv(kAttrGraphOpRun);
+  auto single_op = std::getenv(kGraphOpRun);
   if (single_op && std::string(single_op) == "1") {
     task_sink = false;
   }
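Here the fix swaps kAttrGraphOpRun for kGraphOpRun. Both constants expand to the literal "GRAPH_OP_RUN" (see the header change further down), so behaviour is unchanged; the attribute-style constant is being retired in favour of a dedicated env-key constant. The resolution itself is simple, sketched below in Python (the helper name is illustrative):

import os

def resolve_task_sink(env_key="GRAPH_OP_RUN"):
    # Task sink stays enabled unless the environment explicitly requests
    # single-op execution (GRAPH_OP_RUN=1).
    return os.getenv(env_key) != "1"

print(resolve_task_sink())  # True unless GRAPH_OP_RUN=1 is exported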
@@ -213,7 +213,8 @@ void KernelNotSupportException(const AnfNodePtr &kernel_node, const std::vector<
     }
     operator_info << ") ";
   }
-  operator_info << "is not support.";
+  operator_info << "is not support. This error means the current type is not supported, please refer to the MindSpore "
+                   "doc for supported types";
   MS_EXCEPTION(TypeError) << operator_info.str() << " Trace: " << trace::DumpSourceLines(kernel_node);
 }
 
@@ -471,7 +471,6 @@ constexpr auto kAttrMultiCallEnd = "multicall_end";
 constexpr auto kAttrProfilingIterEnd = "PROFILING_ITER_END";
 constexpr auto kAttrHiddenSize = "hidden_size";
 constexpr auto kAttrInputSize = "input_size";
-constexpr auto kAttrGraphOpRun = "GRAPH_OP_RUN";
 
 // primal attr key name
 constexpr auto kPrimalAttrForwardNodeName = "forward_node_name";
@@ -480,6 +479,9 @@ constexpr auto kPrimalAttrForwardNodeName = "forward_node_name";
 constexpr auto kValueTargetSwitch = "target_switch";
 constexpr auto kValueTargetOther = "target_other";
 
+// env key
+constexpr auto kGraphOpRun = "GRAPH_OP_RUN";
+
 // some size
 const size_t kShape4dDims = 4;
 const size_t kShape3dDims = 3;
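Taken together, the two header hunks retire the attribute-style constant kAttrGraphOpRun and re-introduce the same literal as kGraphOpRun under a new "env key" group, so call sites keep reading the same variable while environment-variable names are no longer mixed in with node attribute keys. A rough Python analogue of that grouping (the class names are illustrative only):

# Illustrative only: keep environment-variable keys in their own namespace,
# mirroring the header reorganisation above. The string values come from the diff.
class AttrKey:
    PROFILING_ITER_END = "PROFILING_ITER_END"
    HIDDEN_SIZE = "hidden_size"
    INPUT_SIZE = "input_size"

class EnvKey:
    GRAPH_OP_RUN = "GRAPH_OP_RUN"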
@@ -602,7 +602,7 @@ BackendPtr CreateBackend() {
       backend->set_is_multi_graph_sink(false);
       context_ptr->set_param<bool>(MS_CTX_IS_MULTI_GRAPH_SINK, false);
     } else {
-      auto single_op = std::getenv(kAttrGraphOpRun);
+      auto single_op = std::getenv(kGraphOpRun);
       if (single_op && std::string(single_op) == "1") {
         context_ptr->set_param<bool>(MS_CTX_ENABLE_TASK_SINK, false);
       }
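CreateBackend consults the same env key: when GRAPH_OP_RUN is "1" and multi-graph sink is off, MS_CTX_ENABLE_TASK_SINK is cleared, so kernels are launched one by one instead of as a sunk task. A minimal usage sketch follows; exporting the variable in the launching shell has the same effect, and the helper name is hypothetical:

import os

def request_single_op_mode():
    # Hypothetical helper: with GRAPH_OP_RUN=1, the backend code above disables
    # task sink (MS_CTX_ENABLE_TASK_SINK = False) for this process.
    os.environ["GRAPH_OP_RUN"] = "1"

request_single_op_mode()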
@@ -220,6 +220,9 @@ class Profiler:
             raise TypeError("The parameter profile_memory must be bool")
         if kwargs:
             logger.warning("There are invalid params which don't work.")
+        task_sink = os.getenv("GRAPH_OP_RUN")
+        if task_sink and task_sink == "1":
+            logger.warning("Profiling is not supported when task is not sink.")
 
     def analyse(self):
         """
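On the Python side the Profiler gains the matching warning: when GRAPH_OP_RUN is "1", profiling is not supported, but construction continues rather than raising. A self-contained sketch of the added check, with the MindSpore logger swapped for the standard logging module:

import logging
import os

logging.basicConfig(level=logging.WARNING)
logger = logging.getLogger("profiler")

def warn_if_task_not_sink():
    """Stand-alone version of the check added to Profiler above."""
    task_sink = os.getenv("GRAPH_OP_RUN")
    if task_sink and task_sink == "1":
        logger.warning("Profiling is not supported when task is not sink.")

os.environ["GRAPH_OP_RUN"] = "1"  # simulate single-op (non-sink) execution
warn_if_task_not_sink()           # emits the warning; profiling data would be incomplete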
@@ -408,7 +408,7 @@ class GradWrap(Cell):
 @pytest.mark.level1
 @pytest.mark.platform_arm_ascend_training
 @pytest.mark.platform_x86_ascend_training
-@pytest.mark.env_onecard
+@pytest.mark.env_single
 def test_pynative_resnet50():
     context.set_context(mode=context.PYNATIVE_MODE, device_target="Ascend")
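The test's scheduling marker changes from env_onecard to env_single. These are project-defined CI labels, so pytest only recognises them if they are registered; a hedged sketch of how such markers could be declared (the descriptions are guesses, not MindSpore's definitions):

# conftest.py -- illustrative registration of project-specific markers so that
# pytest does not emit PytestUnknownMarkWarning for them.
def pytest_configure(config):
    config.addinivalue_line("markers", "env_single: run in a dedicated single-machine environment (assumed meaning)")
    config.addinivalue_line("markers", "env_onecard: run on a single device/card (assumed meaning)")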