Add Parameter input test cases, remove side-effect propagation for the Identity op, and fix a compile performance problem.

This commit is contained in:
Zhang Qinghua 2022-05-26 10:41:38 +08:00
parent fec45b86b2
commit 31ab2accae
8 changed files with 308 additions and 72 deletions
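In short, this change set lets a Parameter object be passed directly as an outermost construct()/ms_function argument. A minimal usage sketch (DoubleNet and the names in it are illustrative, not part of the commit; the pattern follows the new tests at the end of this commit):

import numpy as np
from mindspore import Tensor, Parameter, context, nn

context.set_context(mode=context.GRAPH_MODE)

class DoubleNet(nn.Cell):
    def construct(self, x):
        return x * 2

# A Parameter is now accepted as a top-level network input.
weight = Parameter(Tensor(np.ones((2, 2), np.float32)), name="weight")
print(DoubleNet()(weight))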

View File

@ -1,7 +1,7 @@
/**
* This is the C++ adaptation and derivative work of Myia (https://github.com/mila-iqia/myia/).
*
* Copyright 2021 Huawei Technologies Co., Ltd
* Copyright 2021-2022 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -575,8 +575,14 @@ PynativeAdjointPtr KPynativeCellImpl::ForgeMakeSequenceAdjoint(const CNodePtr &c
} else if (input->isa<ValueNode>()) {
const auto &input_value = GetValueNode(input);
op_args.push_back(input_value);
} else if (input->isa<Parameter>()) {
const auto input_parameter = dyn_cast<Parameter>(input);
MS_EXCEPTION_IF_NULL(input_parameter);
const auto &input_value = input_parameter->default_param();
op_args.push_back(input_value);
} else {
MS_LOG(EXCEPTION) << "Input of MakeTuple/MakeLis is not a CNode or ValueNode, but: " << input->DebugString();
MS_LOG(EXCEPTION) << "The input of MakeTuple/MakeList is not a CNode, ValueNode or Parameter, but "
<< input->DebugString();
}
} else {
op_args.push_back(input_adjoint_iter->second->out());
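The new Parameter branch above takes effect when a Parameter appears as an element of a MakeTuple/MakeList input while PyNative forges the sequence adjoint; its default_param() value is used as the op argument. At the Python level this roughly corresponds to passing a tuple that carries a Parameter, a sketch assuming the mutable-tuple pattern exercised in the tests below (SumNet is an illustrative name):

import numpy as np
from mindspore import Tensor, Parameter, nn
from mindspore.common import mutable

class SumNet(nn.Cell):
    def construct(self, xs):
        return xs[0] + xs[1]

x = Tensor(np.ones((2, 2), np.float32))
p = Parameter(Tensor(np.ones((2, 2), np.float32)), name="p")
# p travels inside a tuple input; in PyNative grad, the branch above
# fetches its default_param() when forging the MakeTuple adjoint.
out = SumNet()(mutable((x, p)))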

View File

@ -560,16 +560,6 @@ namespace {
abstract::AbstractBasePtrList GetArgsAbs(const ResourcePtr &resource) {
FuncGraphPtr func_graph = resource->func_graph();
abstract::AbstractBasePtrList args_abs = resource->args_abs();
// Check if any Parameter object used as construct()/ms_function arguments.
std::set<std::string> args_abs_parameters;
for (auto const &abs : args_abs) {
auto abs_ref = dyn_cast<abstract::AbstractRefTensor>(abs);
if (abs_ref != nullptr) {
auto ref_key = dyn_cast<StringImm>(abs_ref->ref_key_value());
MS_EXCEPTION_IF_NULL(ref_key);
args_abs_parameters.emplace(ref_key->value());
}
}
// Parallel checking.
auto context = parallel::ParallelContext::GetInstance();
@ -586,13 +576,6 @@ abstract::AbstractBasePtrList GetArgsAbs(const ResourcePtr &resource) {
MS_EXCEPTION_IF_NULL(value);
auto abs_value = value->ToAbstract()->cast<abstract::AbstractTensorPtr>();
auto ref_key = std::make_shared<RefKey>(param_node->name());
if (args_abs_parameters.find(ref_key->value()) != args_abs_parameters.end()) {
MS_LOG(EXCEPTION) << "Should not use the Parameter as construct()/ms_function parameter and free variable"
<< "(Including using class member by self.xxx) at the same time. The Parameter object is '"
<< ref_key->value() << "'.\n\n"
<< "To visit them simultaneously may cause an unexpected result,"
<< " you'd better remove one of the two usage modes.";
}
auto abs_ref = std::make_shared<abstract::AbstractRefTensor>(abs_value, ref_key);
context->ParallelParameterContextRestoreShape(func_graph, param_node, abs_ref);
args_abs.push_back(abs_ref);
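With the check above removed, a Parameter may now be used both as a construct()/ms_function argument and as a free variable of the same graph. A minimal sketch of the previously forbidden pattern (MulNet is an illustrative name; the same pattern is verified by test_grad_same_parameter_both_input_and_fv below):

import numpy as np
from mindspore import Tensor, Parameter, nn, ops

class MulNet(nn.Cell):
    def __init__(self, param):
        super().__init__()
        self.param = param          # captured as a free variable
    def construct(self, x):
        return self.param * x

x = Parameter(Tensor(np.array([[1.0, 2.0], [3.0, 4.0]], np.float32)), name='input_x')
net = MulNet(x)
# The same Parameter as both free variable and input: now allowed.
grads = ops.GradOperation(get_all=True, get_by_list=True)(net, net.param)(x)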

View File

@ -260,6 +260,8 @@ bool ResolveObjectToNode(const AnfNodePtr &origin_node, const py::object &obj, A
}
args.push_back(out);
}
// The ParameterTuple will not be added to the order list,
// since we don't want to deal with its RefTensor elements during the auto_monad procedure.
output = NewCNode(std::move(args), func_graph);
} else {
ValuePtr convert_result = nullptr;
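The comment explains that a resolved ParameterTuple is kept out of the order list so auto_monad never has to treat its RefTensor elements as side-effect state. On the user side, ParameterTuple remains an ordinary weights container for GradOperation, e.g. this sketch (ScaleNet is an illustrative name; compare GradCellWithParameterTuple in the tests below):

import numpy as np
from mindspore import Tensor, Parameter, ParameterTuple, nn, ops

class ScaleNet(nn.Cell):
    def __init__(self):
        super().__init__()
        self.w = Parameter(Tensor(np.ones((2, 2), np.float32)), name='w')
    def construct(self, x):
        return self.w * x

net = ScaleNet()
params = ParameterTuple(net.trainable_params())
# Gradients with respect to the ParameterTuple members.
grads = ops.GradOperation(get_by_list=True)(net, params)(Tensor(np.ones((2, 2), np.float32)))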

View File

@ -1158,7 +1158,7 @@ void ProcessVmArgInner(const py::tuple &args, const ResourcePtr &res, VectorRef
std::vector<AnfNodePtr> graph_params = graph->parameters();
std::size_t graph_params_size = graph_params.size();
if ((*arg_list).size() != graph_params_size) {
// maybe some default parameter
// Maybe some default parameter
for (std::size_t i = (*arg_list).size(); i < graph_params_size; i++) {
MS_EXCEPTION_IF_NULL(graph_params[i]);
auto param_ptr = (graph_params[i])->cast<ParameterPtr>();
@ -1194,7 +1194,6 @@ py::object GraphExecutorPy::Run(const py::tuple &args, const py::object &phase_o
#ifdef ENABLE_DEBUGGER
TerminateDebugger();
#endif
std::size_t size = args.size();
if (!py::isinstance<py::str>(phase_obj)) {
MS_LOG(EXCEPTION) << "Run failed, phase input is not a str";
}
@ -1224,8 +1223,8 @@ py::object GraphExecutorPy::Run(const py::tuple &args, const py::object &phase_o
}
auto &execute_info = iter->second;
MS_EXCEPTION_IF_NULL(execute_info);
if (size > execute_info->arg_list_size) {
MS_LOG(WARNING) << "The arg num : size = " << size << ". full_arg_size = " << execute_info->arg_list_size;
if (args.size() > execute_info->arg_list_size) {
MS_LOG(WARNING) << "The args size: " << args.size() << ", full_arg_size: " << execute_info->arg_list_size;
}
ProcessVmArg(args, phase, &execute_info->arg_list);
// Start to run phase.

View File

@ -654,7 +654,7 @@ class SideEffectFinder {
if (IsPrimitiveEquals(prim, prim::kPrimTupleGetItem)) {
return TraceTupleGetItemEffectInfo(cnode, tuple_indexes);
}
// Trace primitive propagating side effect from its input, such as Depend, Identity, etc.
// Trace primitive propagating side effect from its input, such as Depend, etc.
int input_index = GetSideEffectPropagate(prim);
if (input_index > 0 && input_index < static_cast<int>(cnode->size())) {
return TraceTupleEffectInfo(cnode->input(static_cast<size_t>(input_index)), tuple_indexes);
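GetSideEffectPropagate reads the primitive's 'side_effect_propagate' attribute, a 1-based input index naming which input's side effect the primitive forwards. On the Python side a primitive opts in roughly like this (PassThrough is a hypothetical primitive shown only to illustrate the declaration pattern that the Identity hunk below removes; Depend keeps it):

from mindspore.ops import Primitive, prim_attr_register

class PassThrough(Primitive):
    @prim_attr_register
    def __init__(self):
        """Forward whatever side effect input 1 carries."""
        self.add_prim_attr('side_effect_propagate', 1)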

View File

@ -1225,7 +1225,7 @@ AbstractBasePtr AbstractRefTensor::Broaden() const {
// Always broaden for ref
auto abs_tensor = AbstractTensor::Broaden()->cast<AbstractTensorPtr>();
// Broaden the tensor value and keep the ref_key_value.
auto ret = std::make_shared<AbstractRefTensor>(abs_tensor, ref_key_value());
auto ret = std::make_shared<AbstractRefTensor>(abs_tensor, kAnyValue);
return ret;
}
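Broadening the ref key to kAnyValue detaches the broadened abstract from any concrete Parameter identity, so calls that differ only in which Parameter they pass can reuse one compiled graph; that is the compile performance fix. Its observable contract, per test_grad_parameter_input below: a Parameter input and an equal Tensor input yield the same gradients. A self-contained sketch (MulNet/GradCell names are illustrative):

import numpy as np
from mindspore import Tensor, Parameter, nn, ops

class MulNet(nn.Cell):
    def __init__(self, param):
        super().__init__()
        self.param = param
    def construct(self, x):
        return self.param * x

class GradCell(nn.Cell):
    def __init__(self, net):
        super().__init__()
        self.net = net
        self.grad_all = ops.GradOperation(get_all=True)
    def construct(self, x):
        return self.grad_all(self.net)(x)

x = Parameter(Tensor(np.array([[1, 2], [3, 4]])), name='input_x')
y = Parameter(Tensor(np.array([[7, 8], [9, 0]])), name='input_y')
z = Tensor(np.array([[7, 8], [9, 0]]))
# One compiled graph can serve both calls; gradients must match.
a = GradCell(MulNet(x))(y)
b = GradCell(MulNet(x))(z)
assert np.array_equal(a[0].asnumpy(), b[0].asnumpy())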

View File

@ -6399,13 +6399,9 @@ class Identity(Primitive):
[1 2 3 4]
"""
# Side effect is identity with input.
side_effect_propagate = 1
@prim_attr_register
def __init__(self):
"""Initialize identity"""
self.add_prim_attr('side_effect_propagate', 1)
pass
class Range(PrimitiveWithCheck):
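Dropping the attribute leaves Identity's user-visible behavior unchanged: it still just returns its input, per the docstring example above. A sketch assuming the public ops.Identity primitive:

import numpy as np
import mindspore
from mindspore import Tensor, ops

x = Tensor(np.array([1, 2, 3, 4]), mindspore.int64)
output = ops.Identity()(x)
print(output)  # [1 2 3 4]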

View File

@ -1,4 +1,4 @@
# Copyright 2022 Huawei Technologies Co., Ltd
# Copyright 2020-2022 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@ -17,18 +17,24 @@ import numpy as np
import pytest
import mindspore.nn as nn
from mindspore import Tensor, Parameter
from mindspore.common import mutable
from mindspore import Tensor, Parameter, ParameterTuple
from mindspore import context
from mindspore.ops import composite as C
import mindspore.ops as ops
context.set_context(mode=context.GRAPH_MODE)
@pytest.fixture(scope="module", autouse=True)
def setup_teardown():
yield
context.set_context(mode=context.GRAPH_MODE)
class FirstInputTupleNet(nn.Cell):
def construct(self, tuple_a, tensor_x, list_b, tensor_y, scalar, dict_c, flag):
def construct(self, tuple_a, tensor_a, list_b, tensor_b, scalar, dict_c, flag):
if flag:
return tensor_x - tuple_a[2] + list_b[1][1]["x"] - tensor_y + scalar - dict_c["x"]
return tensor_x + tuple_a[2] - list_b[1][1]["y"] + tensor_y - scalar + dict_c["y"]
return tensor_a - tuple_a[2] + list_b[1][1]["x"] - tensor_b + scalar - dict_c["x"]
return tensor_a + tuple_a[2] - list_b[1][1]["y"] + tensor_b - scalar + dict_c["y"]
class GradNet(nn.Cell):
@ -38,8 +44,8 @@ class GradNet(nn.Cell):
self.sens = Tensor(np.ones((2, 2), np.float32) * 5)
self.grad_all = C.GradOperation(get_all=get_all)
def construct(self, tuple_a, tensor_x, list_b, tensor_y, scalar, dict_c, flag):
return self.grad_all(self.forward_net)(tuple_a, tensor_x, list_b, tensor_y, scalar, dict_c, flag)
def construct(self, tuple_a, tensor_a, list_b, tensor_b, scalar, dict_c, flag):
return self.grad_all(self.forward_net)(tuple_a, tensor_a, list_b, tensor_b, scalar, dict_c, flag)
class GradNet1(nn.Cell):
@ -49,68 +55,312 @@ class GradNet1(nn.Cell):
self.sens = Tensor(np.ones((2, 2), np.float32) * 5)
self.grad_all = C.GradOperation(get_all=get_all)
def construct(self, tuple_a, tensor_x, list_b, tensor_y, tensor_z, dict_c):
return self.grad_all(self.forward_net)(tuple_a, tensor_x, list_b, tensor_y, tensor_z, dict_c)
def construct(self, tuple_a, tensor_a, list_b, tensor_b, tensor_c, dict_c):
return self.grad_all(self.forward_net)(tuple_a, tensor_a, list_b, tensor_b, tensor_c, dict_c)
x = Tensor(np.ones((2, 2), np.float32))
y = Tensor(np.ones((2, 2), np.float32) * 2)
z = Tensor(np.ones((2, 2), np.float32) * 3)
w = Tensor(np.ones((2, 2), np.float32) * 4)
sl = 6
s = "ok"
arg_t0 = (x, y, z, w)
arg_t1 = (w, y, z, w)
arg_l0 = [[x, x], [[x, y], {"x": x, "y": y, "z": x, "p": y}]]
arg_l1 = [[x, x], [[x, y], {"x": x, "y": y, "z": x, "p": y}]]
args_d0 = {"x": x, "y": y}
args_d1 = {"x": x, "y": y}
tensor_x = Tensor(np.ones((2, 2), np.float32))
tensor_y = Tensor(np.ones((2, 2), np.float32) * 2)
tensor_z = Tensor(np.ones((2, 2), np.float32) * 3)
tensor_w = Tensor(np.ones((2, 2), np.float32) * 4)
SCALAR_NUM = 6
STRING_INPUT = "ok"
tuple_arg = (tensor_x, tensor_y, tensor_z, tensor_w)
list_arg = [[tensor_x, tensor_x], [[tensor_x, tensor_y], {"x": tensor_x, "y": tensor_y, "z": tensor_x, "p": tensor_y}]]
dict_arg = {"x": tensor_x, "y": tensor_y}
flag_0 = True
flag_1 = False
p = Parameter(x, name="weight")
a = np.ones((2, 2))
parameter_x = Parameter(tensor_x, name="weight")
forward_net = FirstInputTupleNet()
forward_net.set_grad()
grad_all_inputs_net = GradNet(forward_net, get_all=True)
def test_grad_first_input_net():
@pytest.mark.parametrize('mode', [context.PYNATIVE_MODE, context.GRAPH_MODE])
def test_grad_first_input_net(mode):
"""
Feature: Construct()/ms_function input types with backpropagation.
Description: Normal input type.
Expectation: No exception.
"""
class FirstInputTensorNet(nn.Cell):
def construct(self, tensor_x, tuple_a, list_b, tensor_y, tensor_z, dict_c):
return tensor_x + tuple_a[2] - list_b[1][1]["y"] + tensor_y - tensor_z + dict_c["y"]
def construct(self, tensor_a, tuple_a, list_b, tensor_b, tensor_c, dict_c):
return tensor_a + tuple_a[0] - list_b[1][1]["y"] + tensor_b - tensor_c + dict_c["y"]
context.set_context(mode=mode)
grad_first_input_tensor_net = GradNet1(FirstInputTensorNet(), get_all=False)
ret = grad_first_input_tensor_net(z, arg_t0, arg_l0, w, y, args_d0)
assert np.allclose(ret.asnumpy(), np.ones((2, 2), np.float32))
res = grad_first_input_tensor_net(tensor_z, tuple_arg, list_arg, tensor_w, tensor_y, dict_arg)
print('res:', res)
assert np.allclose(res.asnumpy(), np.ones((2, 2), np.float32))
def test_net_inputs_including_str():
# PyNative reports a run error for this case.
# Support context.PYNATIVE_MODE later.
@pytest.mark.parametrize('mode', [context.GRAPH_MODE])
def test_grad_first_input_net_pynative_error(mode):
"""
Feature: Construct()/ms_function input types with backpropagation.
Description: Normal input type.
Expectation: No exception.
"""
class FirstInputTensorNet(nn.Cell):
def construct(self, tensor_a, tuple_a, list_b, tensor_b, tensor_c, dict_c):
return tensor_a + tuple_a[2] - list_b[1][1]["y"] + tensor_b - tensor_c + dict_c["y"]
context.set_context(mode=mode)
grad_first_input_tensor_net = GradNet1(FirstInputTensorNet(), get_all=False)
res = grad_first_input_tensor_net(tensor_z, tuple_arg, list_arg, tensor_w, tensor_y, dict_arg)
print('res:', res)
assert np.allclose(res.asnumpy(), np.ones((2, 2), np.float32))
@pytest.mark.parametrize('mode', [context.PYNATIVE_MODE, context.GRAPH_MODE])
def test_net_inputs_including_str(mode):
"""
Feature: Construct()/ms_function input types with backpropagation.
Description: String input type.
Expectation: No exception.
"""
context.set_context(mode=mode)
with pytest.raises(TypeError) as err:
grad_all_inputs_net(arg_t0, s, arg_l0, w, sl, args_d0, flag_0)
assert "The inputs types of the outermost network 'GradNet.construct' support bool, int, float, None, Tensor, " \
grad_all_inputs_net(tuple_arg, STRING_INPUT, list_arg, tensor_w, SCALAR_NUM, dict_arg, flag_0)
print('err: ', str(err.value))
# The network is 'GradNet.construct' in graph mode,
# and 'FirstInputTupleNet.construct' in PyNative mode.
assert "The inputs types of the outermost network" in str(err.value)
assert "support bool, int, float, None, Tensor, " \
"Parameter, mstype.Number(mstype.bool, mstype.int, mstype.float, mstype.uint), " \
"and tuple or list containing only these types, and dict whose values are these types, " \
"but the 1th arg type is <class 'str'>, value is 'ok'" in str(err.value)
# Support the Parameter as outermost input.
def test_outermost_net_pass_parameter():
forward_net(arg_t0, p, arg_l0, w, sl, args_d0, flag_0)
@pytest.mark.parametrize('mode', [context.PYNATIVE_MODE, context.GRAPH_MODE])
def test_outermost_net_pass_parameter(mode):
"""
Feature: Construct()/ms_function input types with backpropagation.
Description: Parameter input type.
Expectation: No exception.
"""
context.set_context(mode=mode)
forward_net(tuple_arg, parameter_x, list_arg, tensor_w, SCALAR_NUM, dict_arg, flag_0)
# Support the Parameter as outermost input.
def test_outermost_net_pass_tuple_including_parameter():
forward_net(arg_t0, z, arg_l0, sl, (z, w, p), args_d0, flag_0)
# Support context.PYNATIVE_MODE UT later.
@pytest.mark.parametrize('mode', [context.GRAPH_MODE])
def test_outermost_net_pass_tuple_including_parameter(mode):
"""
Feature: Construct()/ms_function input types with backpropagation.
Description: Tuple with Parameter as input type.
Expectation: No exception.
"""
context.set_context(mode=mode)
mutable_tuple = mutable((tensor_z, tensor_w, parameter_x))
forward_net(tuple_arg, tensor_z, list_arg, SCALAR_NUM, mutable_tuple, dict_arg, flag_0)
# Support the Parameter as outermost input.
def test_outermost_net_pass_list_including_parameter():
forward_net(arg_t0, z, arg_l0, sl, [z, w, p], args_d0, flag_0)
# Support context.PYNATIVE_MODE UT later.
@pytest.mark.parametrize('mode', [context.GRAPH_MODE])
def test_outermost_net_pass_list_including_parameter(mode):
"""
Feature: Construct()/ms_function input type with back propagate.
Description: List with Parameter as input type.
Expectation: No exception.
"""
context.set_context(mode=mode)
mutable_list = mutable([tensor_z, tensor_w, parameter_x])
forward_net(tuple_arg, tensor_z, list_arg, SCALAR_NUM, mutable_list, dict_arg, flag_0)
# Support the Parameter as outermost input.
def test_grad_net_pass_dict_including_parameter():
with pytest.raises(RuntimeError) as err:
forward_net(arg_t0, z, arg_l0, sl, sl, {"x": z, "y": w, "z": p}, flag_0)
assert "Illegal type in the graph: AbstractDictionary" in str(err.value)
# Support context.PYNATIVE_MODE UT later.
@pytest.mark.parametrize('mode', [context.GRAPH_MODE])
def test_grad_net_pass_dict_including_parameter(mode):
"""
Feature: Construct()/ms_function input types with backpropagation.
Description: Dict with Parameter as input type.
Expectation: No exception.
"""
context.set_context(mode=mode)
mutable_dict = mutable({"x": tensor_z, "y": tensor_w, "z": parameter_x})
forward_net(tuple_arg, tensor_z, list_arg, SCALAR_NUM, SCALAR_NUM, mutable_dict, flag_0)
class TestCell(nn.Cell):
def __init__(self, param):
super().__init__()
self.a = Tensor(np.array([[1, 2], [3, 4]]))
self.param = param
def construct(self, x):
return self.a * self.param * x
class GradCellWithParameter(nn.Cell):
def __init__(self, net):
super().__init__()
self.net = net
self.grad = ops.GradOperation(get_all=True, get_by_list=True)
self.param = self.net.param
def construct(self, x):
return self.grad(self.net, self.param)(x)
class GradCell(nn.Cell):
def __init__(self, net):
super().__init__()
self.net = net
self.grad_all = ops.GradOperation(get_all=True)
def construct(self, x):
return self.grad_all(self.net)(x)
@pytest.mark.parametrize('mode', [context.PYNATIVE_MODE, context.GRAPH_MODE])
def test_grad_parameter_input(mode):
"""
Feature: Construct()/ms_function input types with backpropagation.
Description: Grad with Parameter as input type.
Expectation: No exception.
"""
context.set_context(mode=mode)
x = Parameter(Tensor(np.array([[1, 2], [3, 4]])), name='input_x')
y = Parameter(Tensor(np.array([[7, 8], [9, 0]])), name='input_y')
z = Tensor(np.array([[7, 8], [9, 0]]))
a = GradCell(TestCell(x))(y)
b = GradCell(TestCell(x))(z)
print(f'a: {a}')
print(f'b: {b}')
assert np.array_equal(a[0].asnumpy(), b[0].asnumpy())
# PyNative reports a run error for this case.
# Support context.PYNATIVE_MODE later.
@pytest.mark.parametrize('mode', [context.GRAPH_MODE])
def test_grad_parameter_as_input_and_fv(mode):
"""
Feature: Construct()/ms_function input types with backpropagation.
Description: Grad with Parameters as input type and fv.
Expectation: No exception.
"""
context.set_context(mode=mode)
x = Parameter(Tensor(np.array([[1, 2], [3, 4]])), name='input_x')
y = Parameter(Tensor(np.array([[7, 8], [9, 0]])), name='input_y')
z = Tensor(np.array([[7, 8], [9, 0]]))
a = GradCellWithParameter(TestCell(x))(y)
b = GradCellWithParameter(TestCell(x))(z)
print(f'a: {a}')
print(f'b: {b}')
assert np.array_equal(a[0][0].asnumpy(), b[0][0].asnumpy())
assert np.array_equal(a[1].asnumpy(), b[1].asnumpy())
# PyNative reports a run error for this case.
# Support context.PYNATIVE_MODE later.
@pytest.mark.parametrize('mode', [context.GRAPH_MODE])
def test_grad_same_parameter_both_input_and_fv(mode):
"""
Feature: Construct()/ms_function input types with backpropagation.
Description: Grad with the same Parameter used as both input and fv.
Expectation: No exception.
"""
context.set_context(mode=mode)
x = Parameter(Tensor(np.array([[1, 2], [3, 4]])), name='input_x')
y = Tensor(np.array([[1, 2], [3, 4]]))
a = GradCellWithParameter(TestCell(x))(x)
b = GradCellWithParameter(TestCell(x))(y)
print(f'a: {a}')
print(f'b: {b}')
assert np.array_equal(a[0][0].asnumpy(), b[0][0].asnumpy())
assert np.array_equal(a[1].asnumpy(), b[1].asnumpy())
class TestCell2(nn.Cell):
def __init__(self, param1, param2):
super().__init__()
self.a = Tensor(np.array([[1, 2], [3, 4]]))
self.param1 = param1
self.param2 = param2
def construct(self, x):
return self.a * self.param1 * self.param2 * x
class GradCellWithParameterTuple(nn.Cell):
def __init__(self, net):
super().__init__()
self.net = net
self.grad = ops.GradOperation(get_all=True, get_by_list=True)
self.param1 = self.net.param1
self.param2 = self.net.param2
self.params = ParameterTuple([self.param1, self.param2])
def construct(self, x):
return self.grad(self.net, self.params)(x)
class GradCellWithListOfParameter(nn.Cell):
def __init__(self, net):
super().__init__()
self.net = net
self.grad = ops.GradOperation(get_all=True, get_by_list=True)
self.param1 = self.net.param1
self.param2 = self.net.param2
def construct(self, x):
return self.grad(self.net, [self.param1, self.param2])(x)
class GradCellWithTupleOfParameter(nn.Cell):
def __init__(self, net):
super().__init__()
self.net = net
self.grad = ops.GradOperation(get_all=True, get_by_list=True)
self.param1 = self.net.param1
self.param2 = self.net.param2
def construct(self, x):
return self.grad(self.net, (self.param1, self.param2))(x)
@pytest.mark.parametrize('mode', [context.PYNATIVE_MODE, context.GRAPH_MODE])
def test_grad_parameter_as_input_and_fv2(mode):
"""
Feature: Construct()/ms_function input types with backpropagation.
Description: Grad with Parameters as input type and fv. ParameterTuple as fv.
Expectation: No exception.
"""
context.set_context(mode=mode)
x1 = Parameter(Tensor(np.array([[1, 2], [3, 4]])), name='input_x1')
x2 = Parameter(Tensor(np.array([[1, 2], [3, 4]])), name='input_x2')
y = Parameter(Tensor(np.array([[7, 8], [9, 0]])), name='input_y')
z = Tensor(np.array([[7, 8], [9, 0]]))
a = GradCellWithParameterTuple(TestCell2(x1, x2))(y)
b = GradCellWithParameterTuple(TestCell2(x1, x2))(z)
print(f'a: {a}')
print(f'b: {b}')
assert np.array_equal(a[0][0].asnumpy(), b[0][0].asnumpy())
assert np.array_equal(a[1][0].asnumpy(), b[1][0].asnumpy())
assert np.array_equal(a[1][1].asnumpy(), b[1][1].asnumpy())
@pytest.mark.skip(reason='List or tuple of parameters is not supported as GradOperation input for now')
@pytest.mark.parametrize('mode', [context.PYNATIVE_MODE, context.GRAPH_MODE])
def test_grad_parameter_list_or_tuple(mode):
"""
Feature: Construct()/ms_function input types with backpropagation.
Description: Grad with Parameters as input and fv; a list or tuple of Parameters passed as the fv argument of grad.
Expectation: No exception.
"""
context.set_context(mode=mode)
x1 = Parameter(Tensor(np.array([[1, 2], [3, 4]])), name='input_x1')
x2 = Parameter(Tensor(np.array([[1, 2], [3, 4]])), name='input_x2')
y = Tensor(np.array([[7, 8], [9, 0]]))
# Should not throw exception.
GradCellWithListOfParameter(TestCell2(x1, x2))(y)
GradCellWithTupleOfParameter(TestCell2(x1, x2))(y)