diff --git a/mindspore/ccsrc/frontend/optimizer/irpass/reshape_eliminate.h b/mindspore/ccsrc/frontend/optimizer/irpass/reshape_eliminate.h
index a3c9fadd1a1..98db55063d5 100644
--- a/mindspore/ccsrc/frontend/optimizer/irpass/reshape_eliminate.h
+++ b/mindspore/ccsrc/frontend/optimizer/irpass/reshape_eliminate.h
@@ -98,7 +98,6 @@ class TwoReshapeEliminater : public AnfVisitor {
       if (node->scope() != kDefaultScope) {
         new_node->set_scope(node->scope());
       }
-      new_node->set_fullname_with_scope(node->fullname_with_scope());
       return new_node;
     }
     return nullptr;
diff --git a/mindspore/core/ir/func_graph_cloner.cc b/mindspore/core/ir/func_graph_cloner.cc
index f37a2542e1e..adf0afffb4f 100644
--- a/mindspore/core/ir/func_graph_cloner.cc
+++ b/mindspore/core/ir/func_graph_cloner.cc
@@ -77,7 +77,7 @@ void Cloner::CloneParameter(const AnfNodePtr &node, const FuncGraphPtr &target,
     // Default parameter can be shared since it is readonly.
     new_param->set_default_param(old_param->default_param());
   }
-  ScopePtr scope = (node->scope() != kDefaultScope) ? node->scope() : this->scope();
+  ScopePtr scope = ((node->scope() == kDefaultScope) && (this->scope() != nullptr)) ? this->scope() : node->scope();
   new_param->set_scope(scope);
   repl_node_[node] = new_param;
 }
@@ -89,11 +89,8 @@ void Cloner::CloneCNode(const AnfNodePtr &node, const FuncGraphPtr &target) {
   CNodePtr new_node = std::make_shared<CNode>(AnfNodePtrList{}, target);
   auto old_node = node->cast<CNodePtr>();
   new_node->CloneCNodeInfo(old_node);
-  ScopePtr scope = (node->scope() != kDefaultScope) ? node->scope() : this->scope();
+  ScopePtr scope = ((node->scope() == kDefaultScope) && (this->scope() != nullptr)) ? this->scope() : node->scope();
   new_node->set_scope(scope);
-  if (IsParallelConsiderCNode(old_node) && new_node->scope() == kDefaultScope) {
-    new_node->set_fullname_with_scope(old_node->fullname_with_scope());
-  }
   repl_node_[old_node] = new_node;
   nodes_.emplace_back(old_node, new_node);
 }
@@ -102,7 +99,7 @@ void Cloner::CloneValueNode(const AnfNodePtr &node) {
   MS_EXCEPTION_IF_NULL(node);
   TraceGuard trace_guard(node->debug_info(), relation_);
   ValueNodePtr new_const = NewValueNode(GetValueNode(node));
-  ScopePtr scope = (node->scope() != kDefaultScope) ? node->scope() : this->scope();
+  ScopePtr scope = ((node->scope() == kDefaultScope) && (this->scope() != nullptr)) ? this->scope() : node->scope();
   new_const->set_scope(scope);
   new_const->set_abstract(node->abstract());
   new_const->set_has_new_value(node->cast<ValueNodePtr>()->has_new_value());
@@ -114,7 +111,7 @@ void Cloner::CloneValueNode(const AnfNodePtr &node, const FuncGraphPtr &target)
   MS_EXCEPTION_IF_NULL(target);
   TraceGuard trace_guard(node->debug_info(), relation_);
   ValueNodePtr new_const = NewValueNode(target);
-  ScopePtr scope = (node->scope() != kDefaultScope) ? node->scope() : this->scope();
+  ScopePtr scope = ((node->scope() == kDefaultScope) && (this->scope() != nullptr)) ? this->scope() : node->scope();
   new_const->set_scope(scope);
   new_const->set_abstract(node->abstract());
   new_const->set_has_new_value(node->cast<ValueNodePtr>()->has_new_value());
diff --git a/tests/st/dump/test_data_dump.py b/tests/st/dump/test_data_dump.py
index f5c5ffc3973..a410151d2e2 100644
--- a/tests/st/dump/test_data_dump.py
+++ b/tests/st/dump/test_data_dump.py
@@ -85,13 +85,13 @@ def run_e2e_dump():
     add(Tensor(x), Tensor(y))
     if context.get_context("device_target") == "Ascend":
         assert len(os.listdir(dump_file_path)) == 5
-        output_name = "Add.Add-op1.0.0.*.output.0.DefaultFormat.npy"
+        output_name = "Add.Add-op*.0.0.*.output.0.DefaultFormat.npy"
     elif context.get_context("device_target") == "CPU":
         assert len(os.listdir(dump_file_path)) == 5
-        output_name = "Add.Add-op3.0.0.*.output.0.DefaultFormat.npy"
+        output_name = "Add.Add-op*.0.0.*.output.0.DefaultFormat.npy"
     else:
         assert len(os.listdir(dump_file_path)) == 3
-        output_name = "Add.Add-op3.0.0.*.output.0.DefaultFormat.npy"
+        output_name = "Add.Add-op*.0.0.*.output.0.DefaultFormat.npy"
     output_path = glob.glob(os.path.join(dump_file_path, output_name))[0]
     real_path = os.path.realpath(output_path)
     output = np.load(real_path)
diff --git a/tests/ut/python/parallel/test_auto_parallel_cast.py b/tests/ut/python/parallel/test_auto_parallel_cast.py
index f9e988949d4..a67bf7f9eb7 100644
--- a/tests/ut/python/parallel/test_auto_parallel_cast.py
+++ b/tests/ut/python/parallel/test_auto_parallel_cast.py
@@ -84,9 +84,9 @@ def test_double_star_graph():
     net.set_train()
     _executor.compile(net, x, y, z, w, phase='train')
     strategies = _executor._get_shard_strategy(net)
-    expected_strategies = {'Default/network-Net/Cast-op2': [[8, 1]],
-                           'Default/network-Net/Cast-op4': [[1, 8]],
-                           'Default/network-Net/MatMul-op3': [[8, 1], [1, 1]],
-                           'Default/network-Net/MatMul-op5': [[1, 1], [1, 8]],
-                           'Default/network-Net/MatMul-op1': [[1, 8], [8, 1]]}
+    expected_strategies = {'Default/network-Net/Cast-op1': [[8, 1]],
+                           'Default/network-Net/Cast-op3': [[1, 8]],
+                           'Default/network-Net/MatMul-op2': [[8, 1], [1, 1]],
+                           'Default/network-Net/MatMul-op4': [[1, 1], [1, 8]],
+                           'Default/network-Net/MatMul-op0': [[1, 8], [8, 1]]}
     assert strategies == expected_strategies
diff --git a/tests/ut/python/parallel/test_auto_parallel_transpose.py b/tests/ut/python/parallel/test_auto_parallel_transpose.py
index 1f967ba5500..a4d1375db9e 100644
--- a/tests/ut/python/parallel/test_auto_parallel_transpose.py
+++ b/tests/ut/python/parallel/test_auto_parallel_transpose.py
@@ -79,8 +79,8 @@ def test_two_matmul_transpose():
     net.set_train()
     _executor.compile(net, x, y, b, phase='train')
     strategies = _executor._get_shard_strategy(net)
-    expected_strategies = {'Default/network-Net/Transpose-op1': [[1, 16]],
-                           'Default/network-Net/Transpose-op2': [[16, 1]],
+    expected_strategies = {'Default/network-Net/Transpose-op0': [[1, 16]],
+                           'Default/network-Net/Transpose-op1': [[16, 1]],
                            'Default/network-Net/MatMul-op3': [[16, 1], [1, 1]],
-                           'Default/network-Net/MatMul-op4': [[16, 1], [1, 1]]}
+                           'Default/network-Net/MatMul-op2': [[16, 1], [1, 1]]}
     assert strategies == expected_strategies