!45064 [AutoParallel] fix parameter clone bug

Merge pull request !45064 from lichen/fix_parameter_clone_bug
i-robot 2022-11-04 02:20:21 +00:00 committed by Gitee
commit f52db070a1
3 changed files with 43 additions and 2 deletions

@@ -432,7 +432,7 @@ void InitOptimizerState(const FuncGraphPtr &root) {
     auto graph_executor = pipeline::GraphExecutorPy::GetInstance();
     MS_EXCEPTION_IF_NULL(graph_executor);
     auto phase = graph_executor->phase();
-    auto py_obj = GetPyParameterObj(param_info, CLONED_OBJ);
+    auto py_obj = GetPyParameterObj(param_info, OBJ);
     if (py::isinstance<py::none>(py_obj)) {
       MS_LOG(WARNING) << "Parameter: " << parameter->DebugString() << " can't find python obj.";
       continue;

@@ -491,7 +491,7 @@ class Parameter(Tensor_):
         else:
             info.cloned_obj = [x]
         self.param_info = info
-        param_info_clone.cloned_obj = x
+        param_info_clone.obj = x
         x.param_info = param_info_clone
         x.is_init = False
         x.init = self.init
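
For context, here is a minimal sketch (not part of the change) of the bookkeeping this hunk implies: clone() collects each clone in the original parameter's param_info.cloned_obj list, while the clone's own param_info now records the clone itself under obj, which is the key the C++ hunk above switches to (OBJ instead of CLONED_OBJ). The attribute reads below are an assumption: the hunk only shows obj being written from Python, so it is assumed to be readable back the same way.

from mindspore import Tensor, Parameter
from mindspore.common.initializer import Normal
import mindspore as ms

p = Parameter(Tensor(input_data=None, shape=(16, 32), dtype=ms.float32, init=Normal()), name="w")
c = p.clone()    # the original's param_info.cloned_obj now collects [c]
c2 = c.clone()   # cloning a clone is the path covered by the new test below

# After the fix, each clone's param_info refers back to the clone via `obj`
# (assumed readable from Python; the hunk above only shows it being assigned).
print(c.param_info.obj is c)
print(c2.param_info.obj is c2)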

@@ -0,0 +1,41 @@
# Copyright 2022 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from mindspore import Tensor
from mindspore import Parameter
from mindspore.common.initializer import Normal
import mindspore as ms


def test_parameter_clone():
"""
Feature: test parameter clone api
Description: assert data and repr
Expectation: success
"""
tensor = Tensor(input_data=None, shape=(16, 32), dtype=ms.float32, init=Normal())
param = Parameter(tensor, requires_grad=False)
param2 = param.clone()
data1 = param.asnumpy()
data2 = param2.asnumpy()
repr1 = repr(param2)
assert (data1 == data2).all()
assert "requires_grad=False" in repr1
assert "shape=(16, 32)" in repr1
param3 = param2.clone()
data3 = param3.asnumpy()
repr2 = repr(param3)
assert (data1 == data3).all()
assert repr1 == repr2