Rectify test cases according to the ST test case specifications.

Margaret_wangrui 2022-10-13 12:55:47 +08:00
parent f3f648a6d7
commit 42a2da638b
4 changed files with 130 additions and 38 deletions
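
The pattern applied across the test files in this commit is the same everywhere: the module-level context.set_context(mode=context.GRAPH_MODE) call is dropped, each test takes a mode argument via @pytest.mark.parametrize, and the mode is set at the start of the test body. A minimal standalone sketch of that pattern (SimpleNet and test_mode_parametrize are illustrative names, not part of this commit):

import pytest
import mindspore as ms
import mindspore.nn as nn
from mindspore import Tensor, context
from mindspore.common.parameter import Parameter


class SimpleNet(nn.Cell):
    def __init__(self):
        super().__init__()
        self.param = Parameter(Tensor([1], ms.float32), name="param")

    def construct(self, x):
        return x + self.param


@pytest.mark.parametrize('mode', [context.GRAPH_MODE, context.PYNATIVE_MODE])
def test_mode_parametrize(mode):
    """
    Feature: Run the same case under both execution modes.
    Description: The mode is injected per test instead of being fixed at module level.
    Expectation: No exception.
    """
    # Set the mode inside the test so each parametrized case controls its own context.
    context.set_context(mode=mode)
    net = SimpleNet()
    res = net(Tensor([1], ms.float32))
    assert res == 2
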

View File

@ -165,6 +165,7 @@
"mindspore/tests/st/numpy_native/test_array_ops.py" "useless-super-delegation"
"mindspore/tests/ut/python/mindir/test_mindir_export.py" "no-else-return"
"mindspore/tests/" "c-extension-no-member"
"mindspore/tests/st/parameter/test_parameter_celllist.py" "protected-access"
#MindSpore Lite
"mindspore/mindspore/ccsrc/plugin/device/cpu/kernel/nnacl/experimental/HPC-generator/generator.py" "redefined-builtin"

View File

@ -21,14 +21,13 @@ from mindspore.common.parameter import Parameter
from mindspore.common import ParameterTuple
from mindspore import Tensor, context
context.set_context(mode=context.GRAPH_MODE)
@pytest.mark.level1
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@pytest.mark.env_onecard
def test_parameter_1_1():
@pytest.mark.parametrize('mode', [context.GRAPH_MODE, context.PYNATIVE_MODE])
def test_parameter_1_1(mode):
"""
Feature: Check the names of parameters and the names of inputs of construct.
Description: If the name of an input of construct is the same as a parameter's name, a suffix is added to the input's name.
@ -44,16 +43,20 @@ def test_parameter_1_1():
def construct(self, name_a):
return self.param_a + self.param_b - name_a
context.set_context(mode=mode)
net = ParamNet()
res = net(Tensor([3], ms.float32))
assert res == 0
assert net.param_a.name == "name_a"
assert net.param_b.name == "name_b"
@pytest.mark.level1
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@pytest.mark.env_onecard
def test_parameter_1_2():
@pytest.mark.parametrize('mode', [context.GRAPH_MODE, context.PYNATIVE_MODE])
def test_parameter_1_2(mode):
"""
Feature: Check the names of parameters and the names of inputs of construct.
Description: If the name of an input of construct is the same as a parameter's name, a suffix is added to the input's name.
@ -69,16 +72,21 @@ def test_parameter_1_2():
def construct(self, name_b):
return self.param_a + self.param_b[0] - name_b
context.set_context(mode=mode)
net = ParamNet()
res = net(Tensor([3], ms.float32))
assert res == 0
assert net.param_a.name == "name_a"
assert net.param_b[0].name == "name_b"
assert net.param_b[1].name == "name_a"
@pytest.mark.level1
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@pytest.mark.env_onecard
def test_parameter_2_1():
@pytest.mark.parametrize('mode', [context.GRAPH_MODE])
def test_parameter_2_1(mode):
"""
Feature: Check the names of parameters.
Description: If parameters in init have the same name, an exception is thrown.
@ -95,6 +103,7 @@ def test_parameter_2_1():
return self.param_a + self.param_b
with pytest.raises(ValueError, match="its name 'name_a' already exists."):
context.set_context(mode=mode)
net = ParamNet()
res = net()
assert res == 3
@ -104,7 +113,8 @@ def test_parameter_2_1():
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@pytest.mark.env_onecard
def test_parameter_2_2():
@pytest.mark.parametrize('mode', [context.GRAPH_MODE])
def test_parameter_2_2(mode):
"""
Feature: Check the names of parameters.
Description: Check the names of parameters in init.
@ -123,6 +133,7 @@ def test_parameter_2_2():
return self.param_a + self.res1[0] + self.res2
with pytest.raises(ValueError, match="its name 'name_a' already exists."):
context.set_context(mode=mode)
net = ParamNet()
res = net()
assert res == 10
@ -132,7 +143,8 @@ def test_parameter_2_2():
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@pytest.mark.env_onecard
def test_parameter_3():
@pytest.mark.parametrize('mode', [context.GRAPH_MODE, context.PYNATIVE_MODE])
def test_parameter_3(mode):
"""
Feature: Check the names of parameters.
Description: Check the names of parameters in init.
@ -148,16 +160,20 @@ def test_parameter_3():
def construct(self):
return self.param_a + self.param_b
context.set_context(mode=mode)
net = ParamNet()
res = net()
assert res == 3
assert net.param_a.name == "param_a"
assert net.param_b.name == "param_b"
@pytest.mark.level1
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@pytest.mark.env_onecard
def test_parameter_4():
@pytest.mark.parametrize('mode', [context.GRAPH_MODE])
def test_parameter_4(mode):
"""
Feature: Check the names of parameters.
Description: Check the names of parameters in init.
@ -174,6 +190,7 @@ def test_parameter_4():
return self.res1[0] + self.res1[1]
with pytest.raises(ValueError, match="its name 'name_a' already exists."):
context.set_context(mode=mode)
net = ParamNet()
res = net()
assert res == 6
@ -183,7 +200,8 @@ def test_parameter_4():
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@pytest.mark.env_onecard
def test_parameter_5_1():
@pytest.mark.parametrize('mode', [context.GRAPH_MODE])
def test_parameter_5_1(mode):
"""
Feature: Check the names of parameters.
Description: Check the names of parameters in init.
@ -199,6 +217,7 @@ def test_parameter_5_1():
return self.res1[0] + self.res1[1]
with pytest.raises(ValueError, match="its name 'Parameter' already exists."):
context.set_context(mode=mode)
net = ParamNet()
res = net()
assert res == 6
@ -208,7 +227,8 @@ def test_parameter_5_1():
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@pytest.mark.env_onecard
def test_parameter_5_2():
@pytest.mark.parametrize('mode', [context.GRAPH_MODE, context.PYNATIVE_MODE])
def test_parameter_5_2(mode):
"""
Feature: Check the names of parameters.
Description: Check the names of parameters in init.
@ -226,16 +246,21 @@ def test_parameter_5_2():
def construct(self):
return self.param_a + self.res1[0] + self.res2
context.set_context(mode=mode)
net = ParamNet()
res = net()
assert res == 10
assert net.param_a.name == "name_b"
assert net.res1[0].name == "Parameter$1"
assert net.res1[1].name == "name_a"
@pytest.mark.level1
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@pytest.mark.env_onecard
def test_parameter_list_tuple_no_name():
@pytest.mark.parametrize('mode', [context.GRAPH_MODE, context.PYNATIVE_MODE])
def test_parameter_list_tuple_no_name(mode):
"""
Feature: Check the names of parameters.
Description: Check the names of parameters in init.
@ -251,16 +276,22 @@ def test_parameter_list_tuple_no_name():
def construct(self):
return self.param_tuple[0] + self.param_tuple[1] + self.param_list[0] + self.param_list[1]
context.set_context(mode=mode)
net = ParamNet()
res = net()
assert res == 26
assert net.param_tuple[0].name == "Parameter$1"
assert net.param_tuple[1].name == "Parameter$2"
assert net.param_list[0].name == "Parameter$3"
assert net.param_list[1].name == "Parameter$4"
@pytest.mark.level1
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@pytest.mark.env_onecard
def test_parameter_in_tuple():
@pytest.mark.parametrize('mode', [context.GRAPH_MODE, context.PYNATIVE_MODE])
def test_parameter_in_tuple(mode):
"""
Feature: Check the names of parameters.
Description: Check the names of parameters in init.
@ -277,16 +308,22 @@ def test_parameter_in_tuple():
def construct(self):
return self.param_a + self.param_b + self.param_tuple[0] + self.param_tuple[1]
context.set_context(mode=mode)
net = ParamNet()
res = net()
assert res == 6
assert net.param_a.name == "name_a"
assert net.param_b.name == "name_b"
assert net.param_tuple[0].name == "name_a"
assert net.param_tuple[1].name == "name_b"
@pytest.mark.level1
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@pytest.mark.env_onecard
def test_parameter_parameter_tuple_1():
@pytest.mark.parametrize('mode', [context.GRAPH_MODE])
def test_parameter_parameter_tuple_1(mode):
"""
Feature: Check the names of parameters.
Description: Check the names of parameters in init.
@ -304,6 +341,7 @@ def test_parameter_parameter_tuple_1():
return self.param_a + self.param_tuple[0] + self.param_tuple[1]
with pytest.raises(ValueError, match="its name 'name_a' already exists."):
context.set_context(mode=mode)
net = ParamNet()
res = net()
assert res == 11
@ -313,7 +351,8 @@ def test_parameter_parameter_tuple_1():
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@pytest.mark.env_onecard
def test_parameter_parameter_tuple_2():
@pytest.mark.parametrize('mode', [context.GRAPH_MODE, context.PYNATIVE_MODE])
def test_parameter_parameter_tuple_2(mode):
"""
Feature: Check the names of parameters.
Description: Check the names of parameters in init.
@ -329,16 +368,22 @@ def test_parameter_parameter_tuple_2():
def construct(self):
return self.param_a + self.param_tuple[0] + self.param_tuple[1] + self.param_tuple[2]
context.set_context(mode=mode)
net = ParamNet()
res = net()
assert res == 4
assert net.param_a.name == "name_a"
assert net.param_tuple[0].name == "name_a"
assert net.param_tuple[1].name == "name_a"
assert net.param_tuple[2].name == "name_a"
@pytest.mark.level1
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_parameter():
@pytest.mark.parametrize('mode', [context.GRAPH_MODE, context.PYNATIVE_MODE])
def test_parameter(mode):
"""
Feature: Check the names of parameters.
Description: If a parameter in a list or tuple is not given a name, a unique name is assigned to it.
@ -363,6 +408,7 @@ def test_parameter():
res2 = self.param_list[0] + self.param_list[1]
return res1, res2
context.set_context(mode=mode)
net = ParamNet()
x = Tensor([10], ms.float32)
output1, output2 = net(x)
@ -370,13 +416,22 @@ def test_parameter():
output2_expect = Tensor(11, ms.float32)
assert output1 == output1_expect
assert output2 == output2_expect
assert net.param_a.name == "name_a"
assert net.param_b.name == "name_b"
assert net.param_c.name == "param_c"
assert net.param_d.name == "param_d"
assert net.param_tuple[0].name == "Parameter$1"
assert net.param_tuple[1].name == "Parameter$2"
assert net.param_list[0].name == "Parameter$3"
assert net.param_list[1].name == "Parameter$4"
@pytest.mark.level1
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_parameter_same_name_between_tuple_or_list():
@pytest.mark.parametrize('mode', [context.GRAPH_MODE])
def test_parameter_same_name_between_tuple_or_list(mode):
"""
Feature: Check the names of parameters between tuples and lists.
Description: If the same parameter name exists in both the tuple and the list, an exception is thrown.
@ -396,6 +451,7 @@ def test_parameter_same_name_between_tuple_or_list():
return res
with pytest.raises(ValueError, match="its name 'name_a' already exists."):
context.set_context(mode=mode)
net = ParamNet()
x = Tensor([10], ms.float32)
output = net(x)
@ -409,7 +465,8 @@ def test_parameter_same_name_between_tuple_or_list():
@pytest.mark.platform_x86_ascend_training
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_parameter_argument_and_fv():
@pytest.mark.parametrize('mode', [context.GRAPH_MODE, context.PYNATIVE_MODE])
def test_parameter_argument_and_fv(mode):
"""
Feature: Parameter argument in top func graph.
Description: Use Parameter as input argument.
@ -423,6 +480,7 @@ def test_parameter_argument_and_fv():
ms.ops.Assign()(y, Tensor([0]))
return True
context.set_context(mode=mode)
x = Parameter(Tensor([1]))
net = Demo()
net(x)
@ -437,7 +495,8 @@ def test_parameter_argument_and_fv():
@pytest.mark.platform_x86_ascend_training
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_parameter_argument_grad():
@pytest.mark.parametrize('mode', [context.GRAPH_MODE, context.PYNATIVE_MODE])
def test_parameter_argument_grad(mode):
"""
Feature: Parameter argument in top func graph.
Description: Use Parameter as input argument, and pass it to varargs.
@ -455,6 +514,7 @@ def test_parameter_argument_grad():
ms.ops.Assign()(y, param)
return param
context.set_context(mode=mode)
param = Parameter(Tensor(np.array([[0, 0], [0, 0]]), ms.float32), name='param')
x = Parameter(Tensor(np.array([[4.0, -8.0], [-2.0, -5.0]]), ms.float32), name='x')
y = Parameter(Tensor(np.array([[1, 0], [1, 1]]), ms.float32), name='y')
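
Several of the tests above expect MindSpore to reject duplicate parameter names with ValueError ("its name '...' already exists."). Since the diff only shows the changed hunks, the following is a minimal self-contained sketch of the scenario those tests describe; DuplicateNameNet and its values are illustrative assumptions, while the match string is taken from the tests above:

import pytest
import mindspore as ms
import mindspore.nn as nn
from mindspore import Tensor, context
from mindspore.common.parameter import Parameter


class DuplicateNameNet(nn.Cell):
    def __init__(self):
        super().__init__()
        # Two parameters deliberately share the name "name_a".
        self.param_a = Parameter(Tensor([1], ms.float32), name="name_a")
        self.param_b = Parameter(Tensor([2], ms.float32), name="name_a")

    def construct(self):
        return self.param_a + self.param_b


@pytest.mark.parametrize('mode', [context.GRAPH_MODE])
def test_duplicate_parameter_name(mode):
    context.set_context(mode=mode)
    # Construction and execution are both wrapped: the duplicate-name check may
    # trigger at either point, mirroring the structure of the tests above.
    with pytest.raises(ValueError, match="its name 'name_a' already exists."):
        net = DuplicateNameNet()
        net()
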

View File

@ -23,8 +23,6 @@ from mindspore import Tensor, context
from mindspore.common.parameter import ParameterTuple, Parameter
from mindspore.common.initializer import initializer
context.set_context(mode=context.GRAPH_MODE)
class FullyConnectedNet(nn.Cell):
def __init__(self, input_size, hidden_size, output_size):
@ -70,18 +68,30 @@ class EmaUpdate(nn.Cell):
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@pytest.mark.env_onecard
def test_target_update():
@pytest.mark.parametrize('mode', [context.GRAPH_MODE, context.PYNATIVE_MODE])
def test_target_update(mode):
"""
Feature: Manage parameters with CellList.
Description: Check the names of parameters in the CellList.
Expectation: No exception.
"""
context.set_context(mode=mode)
policy_net = FullyConnectedNet(4, 100, 2)
target_net = FullyConnectedNet(4, 100, 2)
tau = 0.2
tau_tensor = Tensor(np.array([tau], dtype=np.float32))
ema_update = EmaUpdate(policy_net, target_net, tau_tensor, period=1)
ema_update()
res = ema_update()
assert res == 1
assert ema_update.step.name == "step"
assert ema_update.policy_param[0].name == "0.linear1.weight"
assert ema_update.policy_param[1].name == "0.linear1.bias"
assert ema_update.policy_param[2].name == "0.linear2.weight"
assert ema_update.policy_param[3].name == "0.linear2.bias"
assert ema_update.target_param[0].name == "1.linear1.weight"
assert ema_update.target_param[1].name == "1.linear1.bias"
assert ema_update.target_param[2].name == "1.linear2.weight"
assert ema_update.target_param[3].name == "1.linear2.bias"
class DenseNet(nn.Cell):
@ -99,16 +109,21 @@ class DenseNet(nn.Cell):
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_two_dense_net():
@pytest.mark.parametrize('mode', [context.GRAPH_MODE, context.PYNATIVE_MODE])
def test_two_dense_net(mode):
"""
Feature: Check the names of parameters.
Description: Check the names of parameters in two networks.
Expectation: No exception.
"""
context.set_context(mode=mode)
x = Tensor(np.random.randn(4, 16).astype(np.float32))
net = DenseNet()
res = net(x)
print("res:", res)
net(x)
assert net.fc1._params['weight'].name == "fc1.weight"
assert net.fc1._params['bias'].name == "fc1.bias"
assert net.fc2._params['weight'].name == "fc2.weight"
assert net.fc2._params['bias'].name == "fc2.bias"
class InnerNet(nn.Cell):
@ -124,18 +139,20 @@ class InnerNet(nn.Cell):
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_two_net():
@pytest.mark.parametrize('mode', [context.GRAPH_MODE, context.PYNATIVE_MODE])
def test_two_net(mode):
"""
Feature: Check the names of parameters.
Description: Check the names of parameters in two networks.
Expectation: No exception.
"""
context.set_context(mode=mode)
net1 = InnerNet()
net2 = InnerNet()
res1 = net1(Tensor([1], ms.float32))
res2 = net2(Tensor([1], ms.float32))
print("res1:", res1)
print("res2:", res2)
assert res1 == res2 == 2
assert net1.param.name == net2.param.name == "name_a"
class OutNet1(nn.Cell):
@ -152,13 +169,15 @@ class OutNet1(nn.Cell):
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_inner_out_net_1():
@pytest.mark.parametrize('mode', [context.GRAPH_MODE])
def test_inner_out_net_1(mode):
"""
Feature: Check the names of parameters.
Description: Check the names of parameters in two networks.
Expectation: No exception.
"""
with pytest.raises(RuntimeError, match="its name 'name_a' already exists."):
context.set_context(mode=mode)
net1 = InnerNet()
net2 = InnerNet()
out_net = OutNet1(net1, net2)
@ -183,14 +202,18 @@ class OutNet2(nn.Cell):
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_inner_out_net_2():
@pytest.mark.parametrize('mode', [context.GRAPH_MODE, context.PYNATIVE_MODE])
def test_inner_out_net_2(mode):
"""
Feature: Check the names of parameters.
Description: Check the names of parameters in two networks.
Expectation: No exception.
"""
context.set_context(mode=mode)
net1 = InnerNet()
net2 = InnerNet()
out_net = OutNet2(net1, net2)
res = out_net(Tensor([1], ms.float32))
print("res:", res)
assert res == 3
assert out_net.param1[0].name == "0.param"
assert out_net.param2[0].name == "1.param"

View File

@ -18,14 +18,13 @@ from mindspore import context, Tensor, ms_function
from mindspore.common.parameter import Parameter
from mindspore.common import ParameterTuple
context.set_context(mode=context.GRAPH_MODE)
@pytest.mark.level1
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@pytest.mark.env_onecard
def test_parameter_ms_function_1():
@pytest.mark.parametrize('mode', [context.GRAPH_MODE])
def test_parameter_ms_function_1(mode):
"""
Feature: Check the names of parameters.
Description: Check the names of parameters in ms_function.
@ -39,6 +38,7 @@ def test_parameter_ms_function_1():
return param_a + param_b
with pytest.raises(RuntimeError, match="its name 'name_a' already exists."):
context.set_context(mode=mode)
res = test_parameter_ms_function()
assert res == 3
@ -47,7 +47,8 @@ def test_parameter_ms_function_1():
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@pytest.mark.env_onecard
def test_parameter_ms_function_2():
@pytest.mark.parametrize('mode', [context.GRAPH_MODE])
def test_parameter_ms_function_2(mode):
"""
Feature: Check the names of parameters.
Description: Check the names of parameters in ms_function.
@ -60,6 +61,7 @@ def test_parameter_ms_function_2():
def test_parameter_ms_function():
return param_a + param_b
context.set_context(mode=mode)
res = test_parameter_ms_function()
assert res == 2
@ -68,7 +70,8 @@ def test_parameter_ms_function_2():
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@pytest.mark.env_onecard
def test_parameter_ms_function_3():
@pytest.mark.parametrize('mode', [context.GRAPH_MODE])
def test_parameter_ms_function_3(mode):
"""
Feature: Check the names of parameters.
Description: Check the names of parameters in ms_function.
@ -82,6 +85,7 @@ def test_parameter_ms_function_3():
return param_a + param_b
with pytest.raises(RuntimeError, match="its name 'Parameter' already exists."):
context.set_context(mode=mode)
res = test_parameter_ms_function()
assert res == 3
@ -90,7 +94,8 @@ def test_parameter_ms_function_3():
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@pytest.mark.env_onecard
def test_parameter_ms_function_4():
@pytest.mark.parametrize('mode', [context.GRAPH_MODE])
def test_parameter_ms_function_4(mode):
"""
Feature: Check the names of parameters.
Description: Check the names of parameters in ms_function.
@ -104,6 +109,7 @@ def test_parameter_ms_function_4():
def test_parameter_ms_function():
return param_a[0] + param_a[1]
context.set_context(mode=mode)
res = test_parameter_ms_function()
assert res == 3
@ -112,7 +118,8 @@ def test_parameter_ms_function_4():
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@pytest.mark.env_onecard
def test_parameter_ms_function_5():
@pytest.mark.parametrize('mode', [context.GRAPH_MODE])
def test_parameter_ms_function_5(mode):
"""
Feature: Check the names of parameters.
Description: Check the names of parameters in ms_function.
@ -125,5 +132,6 @@ def test_parameter_ms_function_5():
def test_parameter_ms_function():
return param_a[0] + param_a[1]
context.set_context(mode=mode)
res = test_parameter_ms_function()
assert res == 3
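
The ms_function cases above assert the same duplicate-name check for Parameters captured as free variables of an ms_function. The surrounding setup is hidden in the diff context, so the sketch below is a plausible reconstruction rather than the committed code; the function and variable names are illustrative, and the RuntimeError match string is taken from test_parameter_ms_function_1 above:

import pytest
import mindspore as ms
from mindspore import Tensor, context, ms_function
from mindspore.common.parameter import Parameter


@pytest.mark.parametrize('mode', [context.GRAPH_MODE])
def test_ms_function_duplicate_name(mode):
    context.set_context(mode=mode)
    # Two free-variable Parameters sharing one name; compiling the ms_function
    # that captures them is expected to fail with the duplicate-name error.
    param_a = Parameter(Tensor([1], ms.float32), name="name_a")
    param_b = Parameter(Tensor([2], ms.float32), name="name_a")

    @ms_function
    def add_params():
        return param_a + param_b

    with pytest.raises(RuntimeError, match="its name 'name_a' already exists."):
        add_params()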