!23295 add dump and profiling warning log when task is not sink

Merge pull request !23295 from baihuawei/fixlog
i-robot 2021-09-14 01:44:56 +00:00 committed by Gitee
commit d7388b40ab
7 changed files with 17 additions and 5 deletions
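
The change gates both data dump and profiling on the GRAPH_OP_RUN environment variable: when it is set to "1" the graph runs op by op (task not sink), neither feature can collect data, and a warning is now logged. A minimal user-side sketch of the same check in Python (the helper name is illustrative, not part of this PR):

import os

def task_sink_enabled():
    # GRAPH_OP_RUN == "1" means the graph is executed op by op (task not sink);
    # in that mode the dump and profiling data added by this PR cannot be collected.
    return os.getenv("GRAPH_OP_RUN") != "1"

if not task_sink_enabled():
    print("Set GRAPH_OP_RUN=0 to enable task sink before using dump or profiling.")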

@@ -60,10 +60,16 @@ std::string GetIfstreamString(const std::ifstream &ifstream) {
}
bool DumpJsonParser::IsDumpEnabled() {
auto single_op = common::GetEnv(kGraphOpRun);
auto config_path = common::GetEnv(kMindsporeDumpConfig);
if (config_path.empty()) {
return false;
}
if (!single_op.empty() && single_op == "1") {
MS_LOG(WARNING) << "Dump is not supported when task is not sink. Please set env GRAPH_OP_RUN to 0 to enable task "
"sink, so that the data can be dumped.";
return false;
}
MS_LOG(INFO) << "Dump config path is " << config_path;
auto context = MsContext::GetInstance();
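
With this change IsDumpEnabled returns true only when a dump config path is set and task sink is on. A rough user-side sketch of satisfying both conditions, assuming kMindsporeDumpConfig resolves to the MINDSPORE_DUMP_CONFIG environment variable (the config path below is a placeholder):

import os
# Placeholder path; MINDSPORE_DUMP_CONFIG is assumed to be the env var behind kMindsporeDumpConfig.
os.environ["MINDSPORE_DUMP_CONFIG"] = "/path/to/dump_config.json"
# Keep task sink enabled so that dump data can actually be written.
os.environ["GRAPH_OP_RUN"] = "0"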

@@ -1265,7 +1265,7 @@ void InitHccl() {
uint32_t device_id = ms_context->get_param<uint32_t>(MS_CTX_DEVICE_ID);
#if ENABLE_D
bool task_sink = true;
auto single_op = std::getenv(kAttrGraphOpRun);
auto single_op = std::getenv(kGraphOpRun);
if (single_op && std::string(single_op) == "1") {
task_sink = false;
}

@@ -213,7 +213,8 @@ void KernelNotSupportException(const AnfNodePtr &kernel_node, const std::vector<
}
operator_info << ") ";
}
operator_info << "is not support.";
operator_info << "is not support. This error means the current type is not supported, please refer to the MindSpore "
"doc for supported types";
MS_EXCEPTION(TypeError) << operator_info.str() << " Trace: " << trace::DumpSourceLines(kernel_node);
}

@@ -471,7 +471,6 @@ constexpr auto kAttrMultiCallEnd = "multicall_end";
constexpr auto kAttrProfilingIterEnd = "PROFILING_ITER_END";
constexpr auto kAttrHiddenSize = "hidden_size";
constexpr auto kAttrInputSize = "input_size";
constexpr auto kAttrGraphOpRun = "GRAPH_OP_RUN";
// primal attr key name
constexpr auto kPrimalAttrForwardNodeName = "forward_node_name";
@@ -480,6 +479,9 @@ constexpr auto kPrimalAttrForwardNodeName = "forward_node_name";
constexpr auto kValueTargetSwitch = "target_switch";
constexpr auto kValueTargetOther = "target_other";
// env key
constexpr auto kGraphOpRun = "GRAPH_OP_RUN";
// some size
const size_t kShape4dDims = 4;
const size_t kShape3dDims = 3;

@@ -602,7 +602,7 @@ BackendPtr CreateBackend() {
backend->set_is_multi_graph_sink(false);
context_ptr->set_param<bool>(MS_CTX_IS_MULTI_GRAPH_SINK, false);
} else {
auto single_op = std::getenv(kAttrGraphOpRun);
auto single_op = std::getenv(kGraphOpRun);
if (single_op && std::string(single_op) == "1") {
context_ptr->set_param<bool>(MS_CTX_ENABLE_TASK_SINK, false);
}

@@ -220,6 +220,9 @@ class Profiler:
raise TypeError("The parameter profile_memory must be bool")
if kwargs:
logger.warning("There are invalid params which don't work.")
task_sink = os.getenv("GRAPH_OP_RUN")
if task_sink and task_sink == "1":
logger.warning("Profiling is not supported when task is not sink.")
def analyse(self):
"""

@@ -408,7 +408,7 @@ class GradWrap(Cell):
@pytest.mark.level1
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@pytest.mark.env_onecard
@pytest.mark.env_single
def test_pynative_resnet50():
context.set_context(mode=context.PYNATIVE_MODE, device_target="Ascend")