!27449 [ME] Add `MS` prefix to env vars

Merge pull request !27449 from chenfei_mindspore/environment_variable_fix
i-robot 2021-12-13 08:03:45 +00:00 committed by Gitee
commit 7d7ec3c0c6
11 changed files with 22 additions and 22 deletions

@@ -134,7 +134,7 @@ def get_bprop_method_of_class(obj, parse_method=None):
 # The fallback feature is enabled in default.
 # Not support change the flag during the process is alive.
-support_fallback_ = os.getenv('DEV_ENV_ENABLE_FALLBACK')
+support_fallback_ = os.getenv('MS_DEV_ENABLE_FALLBACK')
 def resolve_symbol(namespace, symbol):
@@ -519,7 +519,7 @@ def eval_script(exp_str, params):
 obj = eval(exp_str, global_params, local_params)
 except Exception as e:
 error_info = f"When eval '{exp_str}' by using Fallback feature, an error occurred: " + str(e) + \
-". You can try to turn off the Fallback feature by 'export DEV_ENV_ENABLE_FALLBACK=0'."
+". You can try to turn off the Fallback feature by 'export MS_DEV_ENABLE_FALLBACK=0'."
 logger.error(error_info)
 raise e
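Both hunks above touch the fallback switch. Since the Python side reads the flag at module level and the comment notes it cannot be changed while the process is alive, the renamed variable has to be exported before MindSpore is imported. A minimal sketch, not part of this commit and assuming MindSpore is installed:

```python
# Sketch only: disable the fallback feature through the renamed variable.
# The flag is read once at import time, so set it before importing mindspore;
# changing it later in the same process has no effect.
import os

os.environ['MS_DEV_ENABLE_FALLBACK'] = '0'  # '0' turns the fallback feature off

import mindspore as ms  # noqa: E402  deliberately imported after the env var is set
```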

@@ -635,7 +635,7 @@ DumpConfig GetDumpConfig() {
 }
 parsed = true;
 // Start parse config.
-std::string str(common::GetEnv("DEV_ENV_DUMP_IR_CONFIG"));
+std::string str(common::GetEnv("MS_DEV_DUMP_IR_CONFIG"));
 std::vector<std::shared_ptr<HashSet<std::string>>> configs = {std::make_shared<HashSet<std::string>>(),
 std::make_shared<HashSet<std::string>>(),
 std::make_shared<HashSet<std::string>>()};
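GetDumpConfig guards the read with `parsed = true`, so MS_DEV_DUMP_IR_CONFIG is consumed once per process and the parsed sets are reused. A rough Python analogue of that read-once pattern, purely illustrative (the accepted value format is not shown in this diff, and `get_dump_ir_config` is a hypothetical name):

```python
# Illustrative sketch of the "read the env var once, cache the result" pattern above.
import os
from functools import lru_cache

@lru_cache(maxsize=1)
def get_dump_ir_config() -> str:
    # Mirrors common::GetEnv("MS_DEV_DUMP_IR_CONFIG"); returns '' when the variable is unset.
    return os.getenv('MS_DEV_DUMP_IR_CONFIG', '')
```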

@@ -338,8 +338,8 @@ bool SubstitutionList::operator()(const FuncGraphPtr &func_graph, const Optimize
 manager->AddFuncGraph(func_graph);
 bool changes = false;
 static const auto traverse_mode =
-(common::GetEnv("DEV_ENV_TRAVERSE_SUBSTITUTIONS_MODE") != "1" ? kOptTraverseFromIRToSubstitutions
-: kOptTraverseFromSubstitutionsToIR);
+(common::GetEnv("MS_DEV_TRAVERSE_SUBSTITUTIONS_MODE") != "1" ? kOptTraverseFromIRToSubstitutions
+: kOptTraverseFromSubstitutionsToIR);
 if (traverse_mode == kOptTraverseFromIRToSubstitutions &&
 MsContext::GetInstance()->get_param<int>(MS_CTX_EXECUTION_MODE) != kPynativeMode &&
 optimizer->traverse_nodes_first() && !is_once_ && !global_sensitive_) {
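The selection above keeps the default IR-to-substitutions traversal for any value except '1'; only MS_DEV_TRAVERSE_SUBSTITUTIONS_MODE=1 switches to substitutions-to-IR. A small Python sketch of the same decision (the enum is illustrative, not the real C++ constants):

```python
# Sketch of the traverse-mode selection; enum names are placeholders.
import os
from enum import Enum, auto

class TraverseMode(Enum):
    IR_TO_SUBSTITUTIONS = auto()  # default
    SUBSTITUTIONS_TO_IR = auto()  # opted into with MS_DEV_TRAVERSE_SUBSTITUTIONS_MODE=1

traverse_mode = (TraverseMode.SUBSTITUTIONS_TO_IR
                 if os.getenv('MS_DEV_TRAVERSE_SUBSTITUTIONS_MODE') == '1'
                 else TraverseMode.IR_TO_SUBSTITUTIONS)
```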

@@ -384,7 +384,7 @@ ValuePtr ConvertOtherObj(const py::object &obj) {
 // Start RESOLVE_TYPE_INVALID...
 // The fallback feature is enabled in default.
 // Not support change the flag during the process is alive.
-static const auto support_fallback = common::GetEnv("DEV_ENV_ENABLE_FALLBACK");
+static const auto support_fallback = common::GetEnv("MS_DEV_ENABLE_FALLBACK");
 static const auto use_fallback = (support_fallback != "0");
 if (use_fallback) {
 auto res = std::make_shared<InterpretedObject>(obj, py::str(obj));

@@ -92,8 +92,8 @@ AnfNodePtr GetMixedPrecisionCastHelp(const FuncGraphPtr &func_graph, const AnfNo
 FuncGraphWeakPtr Parser::top_func_graph_ = FuncGraphWeakPtr();
 Parser::Parser(const std::shared_ptr<ParseFunctionAst> &ast) : ast_(ast) {
-max_for_loop_count_str_ = common::GetEnv("DEV_ENV_FOR_TO_WHILE_LOOP");
-support_fallback_ = common::GetEnv("DEV_ENV_ENABLE_FALLBACK");
+max_for_loop_count_str_ = common::GetEnv("MS_DEV_FOR_TO_WHILE_LOOP");
+support_fallback_ = common::GetEnv("MS_DEV_ENABLE_FALLBACK");
 errcode_ = PARSE_SUCCESS;
 BuildMethodMap();
 }

@@ -59,7 +59,7 @@ abstract::AbstractBasePtr ClassType::ToAbstract() {
 // The fallback feature is enabled in default.
 // Not support change the flag during the process is alive.
-static const auto support_fallback = common::GetEnv("DEV_ENV_ENABLE_FALLBACK");
+static const auto support_fallback = common::GetEnv("MS_DEV_ENABLE_FALLBACK");
 static const auto use_fallback = (support_fallback != "0");
 if (use_fallback && !IsSupportedCreateInstanceType(obj())) {
 return abs_scalar;

@@ -573,7 +573,7 @@ EvalResultPtr AnalysisEngine::ExecuteEvaluators(const std::vector<EvaluatorPtr>
 MS_EXCEPTION_IF_NULL(eval);
 return eval->Run(shared_from_this(), args_conf_list, out_conf);
 }
-static bool enable_singleThread = (common::GetEnv("DEV_ENV_SINGLE_EVAL") == "1");
+static bool enable_singleThread = (common::GetEnv("MS_DEV_SINGLE_EVAL") == "1");
 if (enable_singleThread) {
 return ExecuteMultipleEvaluators(evaluators, out_conf, args_conf_list);
 } else {

@@ -257,7 +257,7 @@ class AnalysisEngine : public std::enable_shared_from_this<AnalysisEngine> {
 AnalysisEngine(const PrimEvaluatorMap &prim_evaluator_map, const FuncGraphManagerPtr &func_graph_manager)
 : prim_constructors_(prim_evaluator_map), func_graph_manager_(func_graph_manager) {
 forward_count_ = 0;
-enable_recursive_eval_ = (common::GetEnv("ENV_RECURSIVE_EVAL") == "1");
+enable_recursive_eval_ = (common::GetEnv("MS_DEV_RECURSIVE_EVAL") == "1");
 }
 ~AnalysisEngine() = default;

@@ -24,7 +24,7 @@
 namespace mindspore {
 namespace label_manage {
-static TraceLabelType global_trace_type = (common::GetEnv("ENV_TRACE_LABEL_WITH_UNIQUE_ID") == "1")
+static TraceLabelType global_trace_type = (common::GetEnv("MS_DEV_TRACE_LABEL_WITH_UNIQUE_ID") == "1")
 ? TraceLabelType::kWithUniqueId
 : TraceLabelType::kShortSymbol;
 TraceLabelType GetGlobalTraceLabelType() { return global_trace_type; }

@@ -53,7 +53,7 @@ def test_single_for_01():
 y = Tensor([5], mstype.int32)
 z = Tensor([4], mstype.int32)
-os.environ['DEV_ENV_FOR_TO_WHILE_LOOP'] = '1'
+os.environ['MS_DEV_FOR_TO_WHILE_LOOP'] = '1'
 # graph mode
 context.set_context(mode=context.GRAPH_MODE)
 for_net = SingleForNet()
@@ -67,7 +67,7 @@ def test_single_for_01():
 net = GradNet(for_net)
 pynative_forward_res = for_net(x, y, z)
 pynative_backward_res = net(x, y, z)
-os.environ['DEV_ENV_FOR_TO_WHILE_LOOP'] = ''
+os.environ['MS_DEV_FOR_TO_WHILE_LOOP'] = ''
 assert graph_forward_res == pynative_forward_res
 assert graph_backward_res == pynative_backward_res

@@ -923,13 +923,13 @@ def test_large_for_loop():
 t = Tensor(np.ones([2, 3], dtype=np.float32))
 net = Net()
-os.environ['ENV_RECURSIVE_EVAL'] = '1'
+os.environ['MS_DEV_RECURSIVE_EVAL'] = '1'
 old_max_call_depth = context.get_context('max_call_depth')
 context.set_context(max_call_depth=60)
 with pytest.raises(RuntimeError) as err:
 net(t)
 context.set_context(max_call_depth=old_max_call_depth)
-os.environ['ENV_RECURSIVE_EVAL'] = '0'
+os.environ['MS_DEV_RECURSIVE_EVAL'] = '0'
 assert 'Exceed function call depth limit 60' in str(err.value)
@@ -958,12 +958,12 @@ def test_large_for_loop_case2():
 x = Tensor(np.ones([2, 3], dtype=np.float32))
 net = Menet(axis=0, flag_boottom=True, flag_top=True)
-os.environ['ENV_RECURSIVE_EVAL'] = '1'
+os.environ['MS_DEV_RECURSIVE_EVAL'] = '1'
 old_max_call_depth = context.get_context('max_call_depth')
 context.set_context(max_call_depth=80)
 with pytest.raises(RuntimeError) as err:
 net(x)
-os.environ['ENV_RECURSIVE_EVAL'] = '0'
+os.environ['MS_DEV_RECURSIVE_EVAL'] = '0'
 context.set_context(max_call_depth=old_max_call_depth)
 assert 'Exceed function call depth limit 80' in str(err.value)
@@ -986,13 +986,13 @@ def test_large_for_loop_with_continue_break():
 x = self.flatten(x + elem1)
 return x
-os.environ['ENV_RECURSIVE_EVAL'] = '1'
+os.environ['MS_DEV_RECURSIVE_EVAL'] = '1'
 old_max_call_depth = context.get_context('max_call_depth')
 context.set_context(max_call_depth=2000)
 t = Tensor(np.ones([2, 3], dtype=np.float32))
 net = Net()
 net(t)
-os.environ['ENV_RECURSIVE_EVAL'] = '0'
+os.environ['MS_DEV_RECURSIVE_EVAL'] = '0'
 context.set_context(max_call_depth=old_max_call_depth)
@@ -1023,14 +1023,14 @@ def test_recursive_call():
 return out
 context.set_context(mode=context.GRAPH_MODE)
-os.environ['ENV_RECURSIVE_EVAL'] = '1'
+os.environ['MS_DEV_RECURSIVE_EVAL'] = '1'
 old_max_call_depth = context.get_context('max_call_depth')
 context.set_context(max_call_depth=80)
 input_data = Tensor(np.identity(10).astype(np.float32))
 net = Net2()
 with pytest.raises(RuntimeError):
 net(input_data)
-os.environ['ENV_RECURSIVE_EVAL'] = '0'
+os.environ['MS_DEV_RECURSIVE_EVAL'] = '0'
 context.set_context(max_call_depth=old_max_call_depth)
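The tests above toggle MS_DEV_RECURSIVE_EVAL (and, earlier, MS_DEV_FOR_TO_WHILE_LOOP) by assigning to os.environ and writing '0' or '' afterwards, which leaves the variable in place if the code in between raises unexpectedly. A hedged alternative, not part of this commit, is a small helper that restores the previous value even on failure:

```python
# Hypothetical helper, not part of this change: set an env var for one block of
# test code and restore (or remove) the previous value afterwards.
import os
from contextlib import contextmanager

@contextmanager
def temp_env(name, value):
    old = os.environ.get(name)
    os.environ[name] = value
    try:
        yield
    finally:
        if old is None:
            os.environ.pop(name, None)
        else:
            os.environ[name] = old

# Usage sketch:
# with temp_env('MS_DEV_RECURSIVE_EVAL', '1'):
#     net(input_data)
```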