open log info

chenfei 2021-08-31 09:57:27 +08:00
parent 22a33e68a2
commit 2188d80354
51 changed files with 570 additions and 988 deletions
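Most of the changes below follow one pattern: the tests used to run each network twice, once in graph mode and once in PyNative mode, and assert that the two runs agree; after this commit they run in graph mode only and assert against precomputed constants. A minimal sketch of the new shape of these tests, assuming the MindSpore context API used throughout this diff (the helper name is illustrative):

from mindspore import context

def check_graph_mode_only(net_cls, inputs, expect):
    # Run once in graph mode and compare against a precomputed constant
    # instead of re-running the net in PyNative mode as a reference.
    context.set_context(mode=context.GRAPH_MODE)
    net = net_cls()
    out = net(*inputs)
    assert out == expect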

View File

@@ -550,9 +550,8 @@ def test_side_effect_grad_two_addn_switch():
out1 = net.grad_mindspore_impl(inputs, grad_ys)
net = SideEffectTwoAddnSwitchNet()
try:
context.set_context(mode=context.PYNATIVE_MODE)
out2 = net.grad_mindspore_impl(inputs, grad_ys)
allclose_nparray(out1[0][0].asnumpy(), out2[0][0].asnumpy(), 0.001, 0.001)
expect = 54.0
allclose_nparray(out1[0][0].asnumpy(), expect, 0.001, 0.001)
finally:
context.set_context(mode=context.GRAPH_MODE)
@@ -593,9 +592,8 @@ def test_side_effect_grad_if():
out1 = net.grad_mindspore_impl(inputs, grad_ys)
net = SideEffectGradIfNet()
try:
context.set_context(mode=context.PYNATIVE_MODE)
out2 = net.grad_mindspore_impl(inputs, grad_ys)
allclose_nparray(out1.asnumpy(), out2.asnumpy(), 0.001, 0.001)
expect = 18.0
allclose_nparray(out1.asnumpy(), expect, 0.001, 0.001)
finally:
context.set_context(mode=context.GRAPH_MODE)
@@ -687,11 +685,11 @@ def test_side_effect_grad_control_flow_assign_depend_while_net():
inputs2 = Tensor([6.0], ms.float32)
inputs3 = Tensor([3.0], ms.float32)
out1 = net.grad_mindspore_impl(inputs1, inputs2, inputs3, grad_ys)
try:
context.set_context(mode=context.PYNATIVE_MODE)
net = SideEffectControlFlowAssignDependWhileNet()
out2 = net.grad_mindspore_impl(inputs1, inputs2, inputs3, grad_ys)
allclose_nparray(out1[0][0].asnumpy(), out2[0][0].asnumpy(), 0.001, 0.001)
allclose_nparray(out1[1][0].asnumpy(), out2[1][0].asnumpy(), 0.001, 0.001)
expect1 = 18.0
expect2 = 0
allclose_nparray(out1[0][0].asnumpy(), expect1, 0.001, 0.001)
allclose_nparray(out1[1][0].asnumpy(), expect2, 0.001, 0.001)
finally:
context.set_context(mode=context.GRAPH_MODE)
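For reference, allclose_nparray above takes (actual, expected, rtol, atol); a minimal sketch of such a helper, assuming it wraps numpy's tolerance check (the real implementation in the test utilities may differ):

import numpy as np

def allclose_nparray(data_actual, data_expected, rtol, atol):
    # Elementwise |actual - expected| <= atol + rtol * |expected|,
    # the same tolerance rule numpy applies.
    if not np.allclose(data_actual, data_expected, rtol, atol):
        raise AssertionError("actual and expected differ beyond rtol/atol")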

View File

@@ -57,7 +57,7 @@ class GradNet(nn.Cell):
return grad_all(self.net)(*inputs)
def control_flow_single_if(input_net, x, y):
def control_flow_single_if(input_net, x, y, expect1, expect2):
# graph mode
context.set_context(mode=context.GRAPH_MODE)
net = input_net()
@@ -67,17 +67,9 @@ def control_flow_single_if(input_net, x, y):
graph_forward_res = forward_net(x, y)
graph_backward_res = grad_net(x, y)
# pynative mode
context.set_context(mode=context.PYNATIVE_MODE)
net = input_net()
grad_net = GradNet(net)
assert graph_forward_res == expect1
assert graph_backward_res == expect2
forward_net = input_net()
pynative_forward_res = forward_net(x, y)
pynative_backward_res = grad_net(x, y)
assert graph_forward_res == pynative_forward_res
assert graph_backward_res == pynative_backward_res
@pytest.mark.level1
@pytest.mark.platform_x86_gpu_training
@@ -87,7 +79,10 @@ def control_flow_single_if(input_net, x, y):
def test_single_if():
x = Tensor(2, mstype.int32)
y = Tensor(5, mstype.int32)
control_flow_single_if(SingleIfNet, x, y)
expect1 = Tensor(26, mstype.int32)
expect2 = (Tensor(2, mstype.int32), Tensor(2, mstype.int32))
control_flow_single_if(SingleIfNet1, x, y, expect1, expect2)
@pytest.mark.level1
@pytest.mark.platform_x86_gpu_training
@@ -97,4 +92,6 @@ def test_single_if():
def test_single_if_01():
x = Tensor(2, mstype.int32)
y = Tensor(5, mstype.int32)
control_flow_single_if(SingleIfNet1, x, y)
expect1 = Tensor(26, mstype.int32)
expect2 = (Tensor(2, mstype.int32), Tensor(2, mstype.int32))
control_flow_single_if(SingleIfNet1, x, y, expect1, expect2)

View File

@@ -19,7 +19,7 @@ from mindspore import Tensor
from mindspore.ops import composite as C
from mindspore import context
context.set_context(mode=context.GRAPH_MODE, save_graphs=True)
context.set_context(mode=context.GRAPH_MODE, save_graphs=False)
class ForwardNet(nn.Cell):
@@ -42,7 +42,8 @@ class BackwardNet(nn.Cell):
grads = self.grad(self.forward_net)(*inputs)
return grads
@pytest.mark.level1
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@@ -55,7 +56,8 @@ def test_forward():
output = forward_net(c1, c2)
assert expect == output
@pytest.mark.level1
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
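The save_graphs flag flipped at the top of this file controls IR dumping; a minimal sketch of the relevant call, assuming standard mindspore.context usage (save_graphs=True writes .ir files to the working directory, or to save_graphs_path when set):

from mindspore import context

# Keep CI runs clean: compile in graph mode without dumping IR graphs.
context.set_context(mode=context.GRAPH_MODE, save_graphs=False)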

View File

@@ -65,17 +65,8 @@ def test_single_for_01():
net = GradNet(for_net)
graph_backward_res = net(x, y, z)
# pynative mode
context.set_context(mode=context.PYNATIVE_MODE)
for_net_foward = SingleForNet()
pynative_forward_res = for_net_foward(x, y, z)
for_net = SingleForNet()
net = GradNet(for_net)
pynative_backward_res = net(x, y, z)
assert graph_forward_res == pynative_forward_res
assert graph_backward_res == pynative_backward_res
assert graph_forward_res == Tensor([125], mstype.int32)
assert graph_backward_res == (Tensor([15], mstype.int32), Tensor([40], mstype.int32), Tensor([5], mstype.int32))
@pytest.mark.level1
@@ -118,17 +109,8 @@ def test_single_for_02():
graph_forward_res = for_net_forward(x, y, z)
graph_backward_res = net(x, y, z)
# pynative mode
context.set_context(mode=context.PYNATIVE_MODE)
for_net = SingleForNet()
net = GradNet(for_net)
for_net_forward = SingleForNet()
pynative_forward_res = for_net_forward(x, y, z)
pynative_backward_res = net(x, y, z)
assert graph_forward_res == pynative_forward_res
assert graph_backward_res == pynative_backward_res
assert graph_forward_res == Tensor([195], mstype.int32)
assert graph_backward_res == (Tensor([25], mstype.int32), Tensor([64], mstype.int32), Tensor([5], mstype.int32))
@pytest.mark.level1
@@ -144,10 +126,8 @@ def test_single_for_03():
self.add = P.Add()
self.sub = P.Sub()
self.assign = P.Assign()
param_a = np.full((1,), 5, dtype=np.float32)
self.param_a = Parameter(Tensor(param_a), name='a')
param_b = np.full((1,), 2, dtype=np.float32)
self.param_b = Parameter(Tensor(param_b), name='b')
self.param_a = Parameter(Tensor([5], dtype=mstype.int32), name='a')
self.param_b = Parameter(Tensor([2], dtype=mstype.int32), name='b')
def func(self, x):
x = self.mul(x, 2)
@@ -182,17 +162,8 @@ def test_single_for_03():
graph_forward_res = for_net_forward(x, y)
graph_backward_res = net(x, y)
# pynative mode
context.set_context(mode=context.PYNATIVE_MODE)
single_for_net = SingleForNet()
net = GradNet(single_for_net)
for_net_forward = SingleForNet()
pynative_forward_res = for_net_forward(x, y)
pynative_backward_res = net(x, y)
assert graph_forward_res == pynative_forward_res
assert graph_backward_res == pynative_backward_res
assert graph_forward_res == (Tensor([133], mstype.int32), Tensor([7], mstype.int32))
assert graph_backward_res == (Tensor([64], mstype.int32), Tensor([1], mstype.int32))
@pytest.mark.level1
@@ -208,10 +179,8 @@ def test_single_for_04():
self.add = P.Add()
self.sub = P.Sub()
self.assign = P.Assign()
param_a = np.full((1,), 5, dtype=np.float32)
self.param_a = Parameter(Tensor(param_a), name='a')
param_b = np.full((1,), 2, dtype=np.float32)
self.param_b = Parameter(Tensor(param_b), name='b')
self.param_a = Parameter(Tensor([5], dtype=mstype.int32), name='a')
self.param_b = Parameter(Tensor([2], dtype=mstype.int32), name='b')
def construct(self, x):
self.assign(self.param_a, x + self.param_a)
@@ -238,17 +207,8 @@ def test_single_for_04():
graph_forward_res = for_net_forward(x)
graph_backward_res = net(x)
# pynative mode
context.set_context(mode=context.PYNATIVE_MODE)
single_for_net = SingleForNet()
net = GradNet(single_for_net)
for_net_forward = SingleForNet()
pynative_forward_res = for_net_forward(x)
pynative_backward_res = net(x)
assert graph_forward_res == pynative_forward_res
assert graph_backward_res == pynative_backward_res
assert graph_forward_res == Tensor([-5], mstype.int32)
assert graph_backward_res == (Tensor([0], mstype.int32),)
@pytest.mark.level1
@@ -292,14 +252,5 @@ def test_single_for_05():
graph_forward_res = for_net_forward(x)
graph_backward_res = net(x)
# pynative mode
context.set_context(mode=context.PYNATIVE_MODE)
single_for_net = SingleForNet()
net = GradNet(single_for_net)
for_net_forward = SingleForNet()
pynative_forward_res = for_net_forward(x)
pynative_backward_res = net(x)
assert graph_forward_res == pynative_forward_res
assert graph_backward_res == pynative_backward_res
assert graph_forward_res == Tensor([6], mstype.int32)
assert graph_backward_res == (Tensor([1], mstype.int32),)
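The SingleForNet variants above mutate Parameters with P.Assign inside construct; a standalone sketch of that side-effect pattern, with illustrative values (AccumulateNet is hypothetical):

import mindspore.nn as nn
from mindspore import Tensor, Parameter
from mindspore.common import dtype as mstype
from mindspore.ops import operations as P

class AccumulateNet(nn.Cell):
    def __init__(self):
        super(AccumulateNet, self).__init__()
        self.assign = P.Assign()
        self.param_a = Parameter(Tensor([5], dtype=mstype.int32), name='a')

    def construct(self, x):
        # Assign is a side-effecting op: it writes x + param_a back into
        # the Parameter before the value is returned.
        self.assign(self.param_a, x + self.param_a)
        return self.param_a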

View File

@@ -127,6 +127,7 @@ class IfInIfNet4(nn.Cell):
out += self.param_b
return out
class GradNet(nn.Cell):
def __init__(self, net):
super(GradNet, self).__init__()
@@ -136,7 +137,7 @@ class GradNet(nn.Cell):
return grad_all(self.net)(*inputs)
def control_flow_if_in_if(input_net, x):
def control_flow_if_in_if(input_net, x, expect1, expect2):
# graph mode
context.set_context(mode=context.GRAPH_MODE)
net = input_net()
@@ -146,60 +147,66 @@ def control_flow_if_in_if(input_net, x):
graph_forward_res = forward_net(x)
graph_backward_res = grad_net(x)
# pynative mode
context.set_context(mode=context.PYNATIVE_MODE)
net = input_net()
grad_net = GradNet(net)
assert graph_forward_res == expect1
assert graph_backward_res == expect2
forward_net = input_net()
pynative_forward_res = forward_net(x)
pynative_backward_res = grad_net(x)
assert graph_forward_res == pynative_forward_res
assert graph_backward_res == pynative_backward_res
@pytest.mark.level1
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@pytest.mark.env_onecard
def test_if_in_if():
x = Tensor(2, mstype.int32)
control_flow_if_in_if(IfInIfNet, x)
expect1 = Tensor(17, mstype.int32)
expect2 = (Tensor(1, mstype.int32),)
control_flow_if_in_if(IfInIfNet, x, expect1, expect2)
@pytest.mark.level1
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@pytest.mark.env_onecard
def test_if_in_if_01():
x = Tensor(2, mstype.int32)
control_flow_if_in_if(IfInIfNet1, x)
expect1 = Tensor(22, mstype.int32)
expect2 = (Tensor(1, mstype.int32),)
control_flow_if_in_if(IfInIfNet1, x, expect1, expect2)
@pytest.mark.skip(reason="Ascend compile error in multigraph sink.")
@pytest.mark.level1
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@pytest.mark.env_onecard
def test_if_in_if_02():
x = Tensor(2, mstype.int32)
control_flow_if_in_if(IfInIfNet2, x)
expect1 = 0
expect2 = 0
control_flow_if_in_if(IfInIfNet2, x, expect1, expect2)
@pytest.mark.level1
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@pytest.mark.env_onecard
def test_if_in_if_03():
x = Tensor(2, mstype.int32)
control_flow_if_in_if(IfInIfNet3, x)
expect1 = Tensor(7, mstype.int32)
expect2 = (Tensor(1, mstype.int32),)
control_flow_if_in_if(IfInIfNet3, x, expect1, expect2)
@pytest.mark.skip(reason="Result not correct in ascend vm")
@pytest.mark.level1
@pytest.mark.level0
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@pytest.mark.env_onecard
def test_if_in_if_04():
x = Tensor(2, mstype.int32)
control_flow_if_in_if(IfInIfNet4, x)
expect1 = 0
expect2 = 0
control_flow_if_in_if(IfInIfNet4, x, expect1, expect2)
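All of these cases go through the GradNet wrapper, which applies C.GradOperation(get_all=True) and therefore returns one gradient per network input; that is why the expected backward values above are 1-tuples. A minimal sketch, assuming a toy single-input cell (Square is illustrative):

import mindspore.nn as nn
from mindspore import Tensor, context
from mindspore.ops import composite as C
from mindspore.common import dtype as mstype

grad_all = C.GradOperation(get_all=True)

class Square(nn.Cell):
    def construct(self, x):
        return x * x

class GradNet(nn.Cell):
    def __init__(self, net):
        super(GradNet, self).__init__()
        self.net = net

    def construct(self, *inputs):
        return grad_all(self.net)(*inputs)

context.set_context(mode=context.GRAPH_MODE)
# For y = x * x at x = 3, the gradient tuple is (6,).
grads = GradNet(Square())(Tensor(3, mstype.int32))
assert grads == (Tensor(6, mstype.int32),)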

View File

@@ -43,7 +43,7 @@ class ForwardNet(nn.Cell):
F.assign(self.weight, i)
self.weight = i
i = i + 1
return out, self.weight
return out
class BackwardNet(nn.Cell):
@@ -56,7 +56,8 @@ class BackwardNet(nn.Cell):
grads = self.grad(self.forward_net)(*inputs)
return grads
@pytest.mark.level1
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@@ -68,11 +69,8 @@ def test_forward():
y = Tensor(np.array(3), mstype.int32)
graph_forward_net = ForwardNet(max_cycles=10)
graph_mode_out = graph_forward_net(x, y)
# Pynative Mode
context.set_context(mode=context.PYNATIVE_MODE)
pynative_forward_net = ForwardNet(max_cycles=10)
pynative_mode_out = pynative_forward_net(x, y)
assert graph_mode_out == pynative_mode_out
assert graph_mode_out == Tensor(21, mstype.int32)
@pytest.mark.level0
@@ -88,9 +86,5 @@ def test_backward():
graph_forward_net = ForwardNet(max_cycles=10)
graph_backward_net = BackwardNet(graph_forward_net)
graph_mode_grads = graph_backward_net(x, y)
# Pynative Mode
context.set_context(mode=context.PYNATIVE_MODE)
pynative_forward_net = ForwardNet(max_cycles=10)
pynative_backward_net = BackwardNet(pynative_forward_net)
pynative_mode_grads = pynative_backward_net(x, y)
assert graph_mode_grads == pynative_mode_grads
assert graph_mode_grads == (Tensor(21, mstype.int32), Tensor(7, mstype.int32))

View File

@@ -79,7 +79,7 @@ class BackwardNetReplaceBreak(nn.Cell):
return grads
@pytest.mark.level1
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@@ -91,10 +91,7 @@ def test_forward():
forward_net = ForwardNet(max_cycles=10)
graph_mode_out = forward_net(x, y)
context.set_context(mode=context.PYNATIVE_MODE)
pynative_forward_net = ForwardNet(max_cycles=10)
pynative_mode_out = pynative_forward_net(x, y)
assert graph_mode_out == pynative_mode_out
assert graph_mode_out == Tensor(np.array(21), mstype.int32)
# Problem: Exceed function call depth limit 1000.
@@ -110,14 +107,10 @@ def test_backward():
backward_net = BackwardNet(forward_net)
graph_grads = backward_net(x, y)
context.set_context(mode=context.PYNATIVE_MODE)
forward_net = ForwardNet(max_cycles=10)
backward_net = BackwardNet(forward_net)
pynative_grads = backward_net(x, y)
assert graph_grads == pynative_grads
assert graph_grads == Tensor(np.array(21), mstype.int32)
@pytest.mark.level1
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@@ -129,16 +122,11 @@ def test_forward_replace_break():
forward_net = ForwardNetReplaceBreak(max_cycles=10)
graph_out = forward_net(x, y)
context.set_context(mode=context.PYNATIVE_MODE)
x = Tensor(np.array(1), mstype.int32)
y = Tensor(np.array(3), mstype.int32)
forward_net = ForwardNetReplaceBreak(max_cycles=10)
pynative_out = forward_net(x, y)
assert graph_out == pynative_out
assert graph_out == Tensor(np.array(21), mstype.int32)
# Problem: Exceed function call depth limit 1000.
@pytest.mark.level1
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@@ -151,10 +139,4 @@ def test_backward_replace_break():
backward_net = BackwardNetReplaceBreak(forward_net)
graph_grads = backward_net(x, y)
context.set_context(mode=context.PYNATIVE_MODE)
x = Tensor(np.array(1), mstype.int32)
y = Tensor(np.array(3), mstype.int32)
forward_net = ForwardNetReplaceBreak(max_cycles=10)
backward_net = BackwardNetReplaceBreak(forward_net)
pynative_grads = backward_net(x, y)
assert graph_grads == pynative_grads
assert graph_grads == Tensor(np.array(21), mstype.int32)
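Two cases in this file carry the comment "# Problem: Exceed function call depth limit 1000."; the limit they hit is configurable at compile time. A hedged sketch of the usual workaround, assuming the documented max_call_depth knob (the value is illustrative):

from mindspore import context

# Raise the graph-compile call depth limit so deeply unrolled control
# flow does not trip "Exceed function call depth limit 1000".
context.set_context(max_call_depth=10000)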

View File

@@ -52,6 +52,7 @@ class BackwardNet(nn.Cell):
grads = self.grad(self.forward_net)(*inputs)
return grads
@pytest.mark.level1
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_arm_ascend_training
@@ -64,13 +65,11 @@ def test_forward():
context.set_context(mode=context.GRAPH_MODE)
graph_forward_net = ForwardNet(max_cycles=3)
graph_mode_out = graph_forward_net(x, y)
# Pynative Mode
# context.set_context(mode=context.PYNATIVE_MODE)
# pynative_forward_net = ForwardNet(max_cycles=3)
# pynative_mode_out = pynative_forward_net(x, y)
expect = (Tensor(np.array(9), mstype.int32), Tensor(np.array(2), mstype.int32))
assert graph_mode_out == expect
@pytest.mark.level1
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_arm_ascend_training
@@ -84,10 +83,6 @@ def test_backward():
graph_forward_net = ForwardNet(max_cycles=3)
graph_backward_net = BackwardNet(graph_forward_net)
graph_mode_grads = graph_backward_net(x, y)
# Pynative Mode
# context.set_context(mode=context.PYNATIVE_MODE)
# pynative_forward_net = ForwardNet(max_cycles=3)
# pynative_backward_net = BackwardNet(pynative_forward_net)
# pynative_mode_grads = pynative_backward_net(x, y)
expect = (Tensor(np.array(9), mstype.int32), Tensor(np.array(3), mstype.int32))
assert graph_mode_grads == expect

View File

@@ -49,7 +49,7 @@ class BackwardNet(nn.Cell):
return grads
@pytest.mark.level1
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@@ -61,15 +61,10 @@ def test_forward():
forward_net = ForwardNet(max_cycles=3)
graph_out = forward_net(x, y)
context.set_context(mode=context.PYNATIVE_MODE)
x = Tensor(np.array(1), mstype.int32)
y = Tensor(np.array(3), mstype.int32)
forward_net = ForwardNet(max_cycles=3)
pynative_out = forward_net(x, y)
assert graph_out == pynative_out
assert graph_out == Tensor(np.array(9), mstype.int32)
@pytest.mark.level1
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@@ -82,10 +77,4 @@ def test_backward():
backward_net = BackwardNet(forward_net)
graph_grads = backward_net(x, y)
context.set_context(mode=context.PYNATIVE_MODE)
x = Tensor(np.array(1), mstype.int32)
y = Tensor(np.array(3), mstype.int32)
forward_net = ForwardNet(max_cycles=3)
backward_net = BackwardNet(forward_net)
pynative_grads = backward_net(x, y)
assert graph_grads == pynative_grads
assert graph_grads == Tensor(np.array(9), mstype.int32)
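These asserts compare Tensors directly; equality on single-element Tensors yields a boolean Tensor whose truth value is defined, which is what lets a bare assert work. A small illustrative check (values arbitrary):

import numpy as np
from mindspore import Tensor
from mindspore.common import dtype as mstype

# Single-element Tensor comparison supports bool(), so it can back an
# assert directly, as the tests in this commit do.
assert Tensor(np.array(9), mstype.int32) == Tensor(9, mstype.int32)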

View File

@@ -37,13 +37,13 @@ class ForwardNet(nn.Cell):
def construct(self, x, y):
out = self.zero
i = self.i
if x > y:
if x < y:
while i < self.max_cycles:
self.weight = i
F.assign(self.weight, i)
out = x * y + out
out = x * y * self.weight + out
i = i + 1
return out, self.weight
return out
class BackwardNet(nn.Cell):
@@ -57,27 +57,22 @@ class BackwardNet(nn.Cell):
return grads
@pytest.mark.level1
@pytest.mark.skip(reason="GPU backward result is error!")
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@pytest.mark.env_onecard
def test_forward():
def test_forward_gpu():
x = Tensor(np.array(1), mstype.int32)
y = Tensor(np.array(3), mstype.int32)
# Graph Mode
context.set_context(mode=context.GRAPH_MODE)
graph_forward_net = ForwardNet(max_cycles=3)
graph_mode_out = graph_forward_net(x, y)
# Pynative Mode
context.set_context(mode=context.PYNATIVE_MODE)
pynative_forward_net = ForwardNet(max_cycles=3)
pynative_mode_out = pynative_forward_net(x, y)
assert graph_mode_out == pynative_mode_out
assert graph_mode_out == Tensor(np.array(9), mstype.int32)
@pytest.mark.level1
@pytest.mark.platform_x86_gpu_training
@pytest.mark.level0
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@pytest.mark.env_onecard
@@ -89,9 +84,5 @@ def test_backward():
graph_forward_net = ForwardNet(max_cycles=3)
graph_backward_net = BackwardNet(graph_forward_net)
graph_mode_grads = graph_backward_net(x, y)
# Pynative Mode
context.set_context(mode=context.PYNATIVE_MODE)
pynative_forward_net = ForwardNet(max_cycles=3)
pynative_backward_net = BackwardNet(pynative_forward_net)
pynative_mode_grads = pynative_backward_net(x, y)
assert graph_mode_grads == pynative_mode_grads
assert graph_mode_grads == (Tensor(np.array(9), mstype.int32), Tensor(np.array(3), mstype.int32))

View File

@@ -66,12 +66,7 @@ def test_forward():
forward_net = ForwardNet(max_cycles=3)
graph_out = forward_net(x, y)
context.set_context(mode=context.GRAPH_MODE)
x = Tensor(np.array(1), mstype.int32)
y = Tensor(np.array(3), mstype.int32)
forward_net = ForwardNet(max_cycles=3)
pynative_out = forward_net(x, y)
assert graph_out == pynative_out
assert graph_out == Tensor(np.array(27), mstype.int32)
@pytest.mark.level1
@@ -87,10 +82,4 @@ def test_backward():
backward_net = BackwardNet(forward_net)
graph_grads = backward_net(x, y)
context.set_context(mode=context.PYNATIVE_MODE)
x = Tensor(np.array(1), mstype.int32)
y = Tensor(np.array(3), mstype.int32)
forward_net = ForwardNet(max_cycles=3)
backward_net = BackwardNet(forward_net)
pynative_grads = backward_net(x, y)
assert graph_grads == pynative_grads
assert graph_grads == Tensor(np.array(27), mstype.int32)

View File

@@ -64,12 +64,7 @@ def test_forward():
forward_net = ForwardNet(max_cycles=3)
graph_out = forward_net(x, y)
context.set_context(mode=context.PYNATIVE_MODE)
x = Tensor(np.array(1), mstype.int32)
y = Tensor(np.array(3), mstype.int32)
forward_net = ForwardNet(max_cycles=3)
pynative_out = forward_net(x, y)
assert graph_out == pynative_out
assert graph_out == Tensor(np.array(27), mstype.int32)
@pytest.mark.level1
@@ -85,10 +80,4 @@ def test_backward():
backward_net = BackwardNet(forward_net)
graph_grads = backward_net(x, y)
context.set_context(mode=context.PYNATIVE_MODE)
x = Tensor(np.array(1), mstype.int32)
y = Tensor(np.array(3), mstype.int32)
forward_net = ForwardNet(max_cycles=3)
backward_net = BackwardNet(forward_net)
pynative_grads = backward_net(x, y)
assert graph_grads == pynative_grads
assert graph_grads == Tensor(np.array(27), mstype.int32)

View File

@@ -23,7 +23,8 @@ from mindspore.common import dtype as mstype
grad_all = C.GradOperation(get_all=True)
@pytest.mark.level1
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@@ -66,19 +67,11 @@ def test_for_in_if_01():
graph_forward_res = forward_net(x)
graph_backward_res = net(x)
# pynative mode
context.set_context(mode=context.PYNATIVE_MODE)
for_in_if_net = ForInIfNet()
net = GradNet(for_in_if_net)
assert graph_forward_res == Tensor([640], mstype.int32)
assert graph_backward_res == (Tensor([64], mstype.int32),)
forward_net = ForInIfNet()
pynative_forward_res = forward_net(x)
pynative_backward_res = net(x)
assert graph_forward_res == pynative_forward_res
assert graph_backward_res == pynative_backward_res
@pytest.mark.level1
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@@ -125,19 +118,11 @@ def test_for_in_if_02():
graph_forward_res = forward_net(x)
graph_backward_res = net(x)
# pynative mode
context.set_context(mode=context.PYNATIVE_MODE)
for_in_if_net = ForInIfNet()
net = GradNet(for_in_if_net)
assert graph_forward_res == Tensor([10], mstype.float32)
assert graph_backward_res == (Tensor([1], mstype.float32),)
forward_net = ForInIfNet()
pynative_forward_res = forward_net(x)
pynative_backward_res = net(x)
assert graph_forward_res == pynative_forward_res
assert graph_backward_res == pynative_backward_res
@pytest.mark.level1
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@@ -185,17 +170,9 @@ def test_for_in_if_03():
graph_forward_res = forward_net(x)
graph_backward_res = net(x)
# pynative mode
context.set_context(mode=context.PYNATIVE_MODE)
for_in_if_net = ForInIfNet()
net = GradNet(for_in_if_net)
assert graph_forward_res == (Tensor([29], mstype.float32), Tensor([14], mstype.float32))
assert graph_backward_res == (Tensor([3], mstype.float32),)
forward_net = ForInIfNet()
pynative_forward_res = forward_net(x)
pynative_backward_res = net(x)
assert graph_forward_res == pynative_forward_res
assert graph_backward_res == pynative_backward_res
@pytest.mark.skip(reason="Ascend control multi sink result error")
@pytest.mark.level1
@@ -243,16 +220,9 @@ def test_for_in_if_04():
graph_forward_res = forward_net(x)
graph_backward_res = net(x)
# pynative mode
context.set_context(mode=context.PYNATIVE_MODE)
forward_net = ForInIfNet()
pynative_forward_res = forward_net(x)
for_in_if_net = ForInIfNet()
net = GradNet(for_in_if_net)
expect_backward_res = net(x)
assert graph_forward_res == Tensor([45], mstype.int32)
assert graph_backward_res == Tensor([9], mstype.int32)
assert graph_forward_res == pynative_forward_res
assert graph_backward_res == expect_backward_res
@pytest.mark.skip(reason="Ascend control multi sink result error")
@pytest.mark.level1
@@ -302,14 +272,5 @@ def test_for_in_if_05():
graph_forward_res = forward_net(x)
graph_backward_res = net(x)
# pynative mode
context.set_context(mode=context.PYNATIVE_MODE)
for_in_if_net = ForInIfNet()
pynative_forward_res = for_in_if_net(x)
for_in_if_net = ForInIfNet()
net = GradNet(for_in_if_net)
expect_backward_res = net(x)
assert graph_forward_res == pynative_forward_res
assert graph_backward_res == expect_backward_res
assert graph_forward_res == Tensor([-91], mstype.int32)
assert graph_backward_res == Tensor([13], mstype.int32)

View File

@@ -23,6 +23,7 @@ from mindspore.common import dtype as mstype
grad_all = C.GradOperation(get_all=True)
@pytest.mark.skip(reason="not supported for in while")
def test_for_in_while_01():
class ForInWhileNet(nn.Cell):
@@ -67,17 +68,11 @@ def test_for_in_while_01():
graph_forward_res = forward_net(x)
graph_backward_res = backward_net(x)
# pynative mode
context.set_context(mode=context.PYNATIVE_MODE)
for_in_while_net = ForInWhileNet()
backward_net = GradNet(for_in_while_net)
expect_forward_res = 0
expect_backward_res = 0
assert graph_forward_res == expect_forward_res
assert graph_backward_res == expect_backward_res
forward_net = ForInWhileNet()
pynative_forward_res = forward_net(x)
pynative_backward_res = backward_net(x)
assert graph_forward_res == pynative_forward_res
assert graph_backward_res == pynative_backward_res
@pytest.mark.skip(reason="not supported for in while")
def test_for_in_while_02():
@@ -118,12 +113,7 @@ def test_for_in_while_02():
graph_forward_res = for_in_while_net(x)
graph_backward_res = net(x)
# pynative mode
context.set_context(mode=context.PYNATIVE_MODE)
for_in_while_net = ForInWhileNet()
net = GradNet(for_in_while_net)
pynative_forward_res = for_in_while_net(x)
pynative_backward_res = net(x)
assert graph_forward_res == pynative_forward_res
assert graph_backward_res == pynative_backward_res
expect_forward_res = 0
expect_backward_res = 0
assert graph_forward_res == expect_forward_res
assert graph_backward_res == expect_backward_res

View File

@@ -23,6 +23,7 @@ from mindspore.common import dtype as mstype
grad_all = C.GradOperation(get_all=True)
@pytest.mark.level1
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_arm_ascend_training
@@ -72,19 +73,11 @@ def test_for_in_for_01():
graph_forward_res = forward_net(x)
graph_backward_res = net(x)
# pynative mode
context.set_context(mode=context.PYNATIVE_MODE)
for_in_for_net = ForInForNet()
net = GradNet(for_in_for_net)
assert graph_forward_res == Tensor([0], mstype.float32)
assert graph_backward_res == (Tensor([0], mstype.int32),)
forward_net = ForInForNet()
pynative_forward_res = forward_net(x)
pynative_backward_res = net(x)
assert graph_forward_res == pynative_forward_res
assert graph_backward_res == pynative_backward_res
@pytest.mark.level0
@pytest.mark.level1
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@@ -128,14 +121,5 @@ def test_for_in_for_02():
graph_forward_res = forward_net(x)
graph_backward_res = net(x)
# pynative mode
context.set_context(mode=context.PYNATIVE_MODE)
for_in_for_net = ForInForNet()
net = GradNet(for_in_for_net)
forward_net = ForInForNet()
pynative_forward_res = forward_net(x)
pynative_backward_res = net(x)
assert graph_forward_res == pynative_forward_res
assert graph_backward_res == pynative_backward_res
assert graph_forward_res == Tensor([710], mstype.int32)
assert graph_backward_res == (Tensor([512], mstype.int32),)

View File

@@ -124,7 +124,7 @@ class GradNet(nn.Cell):
return grad_all(self.net)(*inputs)
def control_flow_if_after_if(input_net, x, y):
def control_flow_if_after_if(input_net, x, y, expect1, expect2):
# graph mode
context.set_context(mode=context.GRAPH_MODE)
net = input_net()
@@ -134,20 +134,11 @@ def control_flow_if_after_if(input_net, x, y):
graph_forward_res = forward_net(x, y)
graph_backward_res = grad_net(x, y)
# pynative mode
context.set_context(mode=context.PYNATIVE_MODE)
net = input_net()
grad_net = GradNet(net)
forward_net = input_net()
pynative_forward_res = forward_net(x, y)
pynative_backward_res = grad_net(x, y)
assert graph_forward_res == pynative_forward_res
assert graph_backward_res == pynative_backward_res
assert graph_forward_res == expect1
assert graph_backward_res == expect2
@pytest.mark.level1
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@@ -155,10 +146,12 @@ def control_flow_if_after_if(input_net, x, y):
def test_if_after_if():
x = Tensor(2, mstype.int32)
y = Tensor(5, mstype.int32)
control_flow_if_after_if(IfAfterIfNet, x, y)
expect1 = Tensor(13, mstype.int32)
expect2 = (Tensor(0, mstype.int32), Tensor(1, mstype.int32))
control_flow_if_after_if(IfAfterIfNet, x, y, expect1, expect2)
@pytest.mark.level1
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@@ -166,10 +159,12 @@ def test_if_after_if():
def test_if_after_if_01():
x = Tensor(2, mstype.int32)
y = Tensor(5, mstype.int32)
control_flow_if_after_if(IfAfterIfNet1, x, y)
expect1 = Tensor(13, mstype.int32)
expect2 = (Tensor(0, mstype.int32), Tensor(1, mstype.int32))
control_flow_if_after_if(IfAfterIfNet1, x, y, expect1, expect2)
@pytest.mark.level1
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@@ -177,7 +172,9 @@ def test_if_after_if_01():
def test_if_after_if_02():
x = Tensor(2, mstype.int32)
y = Tensor(5, mstype.int32)
control_flow_if_after_if(IfAfterIfNet2, x, y)
expect1 = Tensor(8, mstype.int32)
expect2 = (Tensor(1, mstype.int32), Tensor(1, mstype.int32))
control_flow_if_after_if(IfAfterIfNet2, x, y, expect1, expect2)
@pytest.mark.level0
@@ -188,9 +185,12 @@ def test_if_after_if_02():
def test_if_after_if_03():
x = Tensor(2, mstype.int32)
y = Tensor(5, mstype.int32)
control_flow_if_after_if(IfAfterIfNet3, x, y)
expect1 = Tensor(19, mstype.int32)
expect2 = (Tensor(5, mstype.int32), Tensor(2, mstype.int32))
control_flow_if_after_if(IfAfterIfNet3, x, y, expect1, expect2)
# @pytest.mark.skip(reason="Result is not correct in vm ascend.")
@pytest.mark.level0
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@@ -198,4 +198,6 @@ def test_if_after_if_03():
def test_if_after_if_04():
x = Tensor(2, mstype.int32)
y = Tensor(5, mstype.int32)
control_flow_if_after_if(IfAfterIfNet4, x, y)
expect1 = Tensor(19, mstype.int32)
expect2 = (Tensor(5, mstype.int32), Tensor(2, mstype.int32))
control_flow_if_after_if(IfAfterIfNet4, x, y, expect1, expect2)

View File

@@ -57,6 +57,7 @@ class BackwardNet(nn.Cell):
grads = self.grad(self.forward_net)(*inputs)
return grads
@pytest.mark.level1
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_arm_ascend_training
@@ -70,11 +71,9 @@ def test_forward():
context.set_context(mode=context.GRAPH_MODE)
graph_forward_net = ForwardNet(max_cycles=10)
graph_mode_out = graph_forward_net(x, y)
# Pynative Mode
context.set_context(mode=context.PYNATIVE_MODE)
pynative_forward_net = ForwardNet(max_cycles=10)
pynative_mode_out = pynative_forward_net(x, y)
assert graph_mode_out == pynative_mode_out
assert graph_mode_out == (Tensor(np.array(10), mstype.int32), Tensor(np.array(30), mstype.int32))
@pytest.mark.level1
@pytest.mark.platform_x86_gpu_training
@@ -89,9 +88,5 @@ def test_backward():
graph_forward_net = ForwardNet(max_cycles=10)
graph_backward_net = BackwardNet(graph_forward_net)
graph_mode_grads = graph_backward_net(x, y)
# Pynative Mode
context.set_context(mode=context.PYNATIVE_MODE)
pynative_forward_net = ForwardNet(max_cycles=10)
pynative_backward_net = BackwardNet(pynative_forward_net)
pynative_mode_grads = pynative_backward_net(x, y)
assert graph_mode_grads == pynative_mode_grads
assert graph_mode_grads == (Tensor(np.array(30), mstype.int32), Tensor(np.array(10), mstype.int32))
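The level0/level1 flips throughout this commit gate which CI lane runs a test; assuming these are plain custom pytest marks, they would be registered roughly like this so `pytest -m level0` can select the fast lane (a sketch; the project's real pytest config may differ):

# conftest.py (hypothetical placement)
def pytest_configure(config):
    # Register the custom marks used in these tests to avoid
    # "unknown mark" warnings and to enable -m selection.
    for mark in ("level0", "level1", "env_onecard",
                 "platform_x86_gpu_training",
                 "platform_arm_ascend_training",
                 "platform_x86_ascend_training"):
        config.addinivalue_line("markers", mark)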

View File

@@ -23,6 +23,7 @@ from mindspore.common import dtype as mstype
grad_all = C.GradOperation(get_all=True)
@pytest.mark.level1
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_arm_ascend_training
@@ -73,17 +74,9 @@ def test_if_after_for_01():
graph_forward_res = forward_net(x)
graph_backward_res = net(x)
# pynative mode
context.set_context(mode=context.PYNATIVE_MODE)
if_after_for_net = IfAfterForNet()
net = GradNet(if_after_for_net)
assert graph_forward_res == Tensor([39], mstype.float32)
assert graph_backward_res == (Tensor([13], mstype.int32),)
forward_net = IfAfterForNet()
pynative_forward_res = forward_net(x)
pynative_backward_res = net(x)
assert graph_forward_res == pynative_forward_res
assert graph_backward_res == pynative_backward_res
@pytest.mark.level1
@pytest.mark.platform_x86_gpu_training
@@ -135,14 +128,5 @@ def test_if_after_for_02():
graph_forward_res = forward_net(x)
graph_backward_res = net(x)
# pynative mode
context.set_context(mode=context.PYNATIVE_MODE)
if_after_for_net = IfAfterForNet()
net = GradNet(if_after_for_net)
forward_net = IfAfterForNet()
pynative_forward_res = forward_net(x)
pynative_backward_res = net(x)
assert graph_forward_res == pynative_forward_res
assert graph_backward_res == pynative_backward_res
assert graph_forward_res == Tensor([195], mstype.int32)
assert graph_backward_res == (Tensor([0], mstype.int32),)

View File

@@ -127,7 +127,7 @@ class GradNet(nn.Cell):
return grad_all(self.net)(*inputs)
def control_flow_if_after_if_in_if(input_net, x):
def control_flow_if_after_if_in_if(input_net, x, expect1, expect2):
# graph mode
context.set_context(mode=context.GRAPH_MODE)
net = input_net()
@@ -137,33 +137,32 @@ def control_flow_if_after_if_in_if(input_net, x):
graph_forward_res = forward_net(x)
graph_backward_res = grad_net(x)
# pynative mode
context.set_context(mode=context.PYNATIVE_MODE)
net = input_net()
grad_net = GradNet(net)
forward_net = input_net()
pynative_forward_res = forward_net(x)
pynative_backward_res = grad_net(x)
assert graph_forward_res == pynative_forward_res
assert graph_backward_res == pynative_backward_res
assert graph_forward_res == expect1
assert graph_backward_res == expect2
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@pytest.mark.env_onecard
def test_if_after_if_in_if():
x = Tensor(2, mstype.int32)
control_flow_if_after_if_in_if(IfAfterIfInIfNet, x)
expect1 = Tensor(14, mstype.int32)
expect2 = (Tensor(1, mstype.int32),)
control_flow_if_after_if_in_if(IfAfterIfInIfNet, x, expect1, expect2)
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@pytest.mark.env_onecard
def test_if_after_if_in_if_01():
x = Tensor(2, mstype.int32)
control_flow_if_after_if_in_if(IfAfterIfInIfNet1, x)
expect1 = Tensor(14, mstype.int32)
expect2 = (Tensor(1, mstype.int32),)
control_flow_if_after_if_in_if(IfAfterIfInIfNet1, x, expect1, expect2)
@pytest.mark.level0
@@ -171,12 +170,28 @@ def test_if_after_if_in_if_01():
@pytest.mark.env_onecard
def test_if_after_if_in_if_02():
x = Tensor(2, mstype.int32)
control_flow_if_after_if_in_if(IfAfterIfInIfNet2, x)
expect1 = Tensor(12, mstype.int32)
expect2 = (Tensor(1, mstype.int32),)
control_flow_if_after_if_in_if(IfAfterIfInIfNet2, x, expect1, expect2)
@pytest.mark.skip(reason="Handle Call Return error in multigraph sink.")
@pytest.mark.level0
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@pytest.mark.env_onecard
def test_if_after_if_in_if_02_ascend():
x = Tensor(2, mstype.int32)
expect1 = Tensor(12, mstype.int32)
expect2 = (Tensor(1, mstype.int32),)
control_flow_if_after_if_in_if(IfAfterIfInIfNet2, x, expect1, expect2)
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@pytest.mark.env_onecard
def test_if_after_if_in_if_03():
x = Tensor(2, mstype.int32)
control_flow_if_after_if_in_if(IfAfterIfInIfNet3, x)
expect1 = Tensor(6, mstype.int32)
expect2 = (Tensor(1, mstype.int32),)
control_flow_if_after_if_in_if(IfAfterIfInIfNet3, x, expect1, expect2)

View File

@@ -44,7 +44,7 @@ class ForwardNet(nn.Cell):
if out >= 30:
self.weight = out
out = out - 30
return out, self.weight
return out
class BackwardNet(nn.Cell):
@@ -57,7 +57,8 @@ class BackwardNet(nn.Cell):
grads = self.grad(self.forward_net)(*inputs)
return grads
@pytest.mark.level1
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@@ -69,13 +70,11 @@ def test_forward():
context.set_context(mode=context.GRAPH_MODE)
graph_forward_net = ForwardNet(max_cycles=10)
graph_mode_out = graph_forward_net(x, y)
# Pynative Mode
context.set_context(mode=context.PYNATIVE_MODE)
pynative_forward_net = ForwardNet(max_cycles=10)
pynative_mode_out = pynative_forward_net(x, y)
assert graph_mode_out == pynative_mode_out
@pytest.mark.level1
assert graph_mode_out == Tensor(np.array(21), mstype.int32)
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@@ -88,9 +87,5 @@ def test_backward():
graph_forward_net = ForwardNet(max_cycles=10)
graph_backward_net = BackwardNet(graph_forward_net)
graph_mode_grads = graph_backward_net(x, y)
# Pynative Mode
context.set_context(mode=context.PYNATIVE_MODE)
pynative_forward_net = ForwardNet(max_cycles=10)
pynative_backward_net = BackwardNet(pynative_forward_net)
pynative_mode_grads = pynative_backward_net(x, y)
assert graph_mode_grads == pynative_mode_grads
assert graph_mode_grads == (Tensor(np.array(21), mstype.int32), Tensor(np.array(7), mstype.int32))

View File

@@ -119,7 +119,7 @@ class GradNet(nn.Cell):
return grad_all(self.net)(*inputs)
def control_flow_if_after_if_in_for(input_net, x):
def control_flow_if_after_if_in_for(input_net, x, expect1, expect2):
# graph mode
context.set_context(mode=context.GRAPH_MODE)
net = input_net()
@@ -129,17 +129,9 @@ def control_flow_if_after_if_in_for(input_net, x):
graph_forward_res = forward_net(x)
graph_backward_res = grad_net(x)
# pynative mode
context.set_context(mode=context.PYNATIVE_MODE)
net = input_net()
grad_net = GradNet(net)
assert graph_forward_res == expect1
assert graph_backward_res == expect2
forward_net = input_net()
pynative_forward_res = forward_net(x)
pynative_backward_res = grad_net(x)
assert graph_forward_res == pynative_forward_res
assert graph_backward_res == pynative_backward_res
@pytest.mark.skip(reason="ME EvalCNode error")
@pytest.mark.level1
@@ -149,7 +141,10 @@ def control_flow_if_after_if_in_for(input_net, x):
@pytest.mark.env_onecard
def test_if_after_if_in_for():
x = Tensor(2, mstype.int32)
control_flow_if_after_if_in_for(IfAfterIfInForNet, x)
expect1 = Tensor(14, mstype.int32)
expect2 = (Tensor(1, mstype.int32),)
control_flow_if_after_if_in_for(IfAfterIfInForNet, x, expect1, expect2)
@pytest.mark.level1
@pytest.mark.platform_x86_gpu_training
@@ -158,7 +153,10 @@ def test_if_after_if_in_for():
@pytest.mark.env_onecard
def test_if_after_if_in_for_01():
x = Tensor(2, mstype.int32)
control_flow_if_after_if_in_for(IfAfterIfInForNet1, x)
expect1 = Tensor(14, mstype.int32)
expect2 = (Tensor(1, mstype.int32),)
control_flow_if_after_if_in_for(IfAfterIfInForNet1, x, expect1, expect2)
@pytest.mark.level1
@pytest.mark.platform_x86_gpu_training
@@ -167,7 +165,10 @@ def test_if_after_if_in_for_01():
@pytest.mark.env_onecard
def test_if_after_if_in_for_02():
x = Tensor(2, mstype.int32)
control_flow_if_after_if_in_for(IfAfterIfInForNet2, x)
expect1 = Tensor(14, mstype.int32)
expect2 = (Tensor(1, mstype.int32),)
control_flow_if_after_if_in_for(IfAfterIfInForNet2, x, expect1, expect2)
@pytest.mark.level1
@pytest.mark.platform_x86_gpu_training
@@ -176,4 +177,6 @@ def test_if_after_if_in_for_02():
@pytest.mark.env_onecard
def test_if_after_if_in_for_03():
x = Tensor(2, mstype.int32)
control_flow_if_after_if_in_for(IfAfterIfInForNet3, x)
expect1 = Tensor(11, mstype.int32)
expect2 = (Tensor(1, mstype.int32),)
control_flow_if_after_if_in_for(IfAfterIfInForNet3, x, expect1, expect2)

View File

@@ -36,7 +36,7 @@ class ForwardNet(nn.Cell):
def construct(self, x, y):
out = self.zero
i = self.i
if x > y:
if x < y:
while i < self.max_cycles:
out = x * y + out
i = i + 1
@@ -57,7 +57,8 @@ class BackwardNet(nn.Cell):
grads = self.grad(self.forward_net)(*inputs)
return grads
@pytest.mark.level1
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@@ -69,13 +70,11 @@ def test_forward():
context.set_context(mode=context.GRAPH_MODE)
graph_forward_net = ForwardNet(max_cycles=3)
graph_mode_out = graph_forward_net(x, y)
# Pynative Mode
context.set_context(mode=context.PYNATIVE_MODE)
pynative_forward_net = ForwardNet(max_cycles=3)
pynative_mode_out = pynative_forward_net(x, y)
assert graph_mode_out == pynative_mode_out
@pytest.mark.level1
assert graph_mode_out == (Tensor(np.array(-11), mstype.int32), Tensor(np.array(9), mstype.int32))
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@@ -88,9 +87,5 @@ def test_backward():
graph_forward_net = ForwardNet(max_cycles=3)
graph_backward_net = BackwardNet(graph_forward_net)
graph_mode_grads = graph_backward_net(x, y)
# Pynative Mode
context.set_context(mode=context.PYNATIVE_MODE)
pynative_forward_net = ForwardNet(max_cycles=3)
pynative_backward_net = BackwardNet(pynative_forward_net)
pynative_mode_grads = pynative_backward_net(x, y)
assert graph_mode_grads == pynative_mode_grads
assert graph_mode_grads == (Tensor(np.array(9), mstype.int32), Tensor(np.array(3), mstype.int32))

View File

@@ -60,7 +60,8 @@ class BackwardNet(nn.Cell):
grads = self.grad(self.forward_net)(*inputs)
return grads
@pytest.mark.level1
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@@ -72,14 +73,11 @@ def test_forward():
context.set_context(mode=context.GRAPH_MODE)
graph_forward_net = ForwardNet(max_cycles=3)
graph_mode_out = graph_forward_net(x, y)
# Pynative Mode
context.set_context(mode=context.PYNATIVE_MODE)
pynative_forward_net = ForwardNet(max_cycles=3)
pynative_mode_out = pynative_forward_net(x, y)
assert graph_mode_out == pynative_mode_out
assert graph_mode_out == (Tensor(np.array(-11), mstype.int32), Tensor(np.array(9), mstype.int32))
@pytest.mark.level1
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@@ -92,12 +90,8 @@ def test_backward():
graph_forward_net = ForwardNet(max_cycles=3)
graph_backward_net = BackwardNet(graph_forward_net)
graph_mode_grads = graph_backward_net(x, y)
# Pynative Mode
context.set_context(mode=context.PYNATIVE_MODE)
pynative_forward_net = ForwardNet(max_cycles=3)
pynative_backward_net = BackwardNet(pynative_forward_net)
pynative_mode_grads = pynative_backward_net(x, y)
assert graph_mode_grads == pynative_mode_grads
assert graph_mode_grads == (Tensor(np.array(9), mstype.int32), Tensor(np.array(3), mstype.int32))
class ForwardNetNoAssign(nn.Cell):
@@ -146,9 +140,5 @@ def test_backward_no_assign():
graph_forward_net = ForwardNetNoAssign(max_cycles=3)
graph_backward_net = BackwardNetNoAssign(graph_forward_net)
graph_mode_grads = graph_backward_net(x, y)
# Pynative Mode
context.set_context(mode=context.PYNATIVE_MODE)
pynative_forward_net = ForwardNetNoAssign(max_cycles=3)
pynative_backward_net = BackwardNetNoAssign(pynative_forward_net)
pynative_mode_grads = pynative_backward_net(x, y)
assert graph_mode_grads == pynative_mode_grads
assert graph_mode_grads == (Tensor(np.array(9), mstype.int32), Tensor(np.array(3), mstype.int32))

View File

@@ -43,7 +43,7 @@ class ForwardNet(nn.Cell):
if out > 20:
F.assign(self.weight, out)
out = out - 20
return out, self.weight
return out
class BackwardNet(nn.Cell):
@@ -56,7 +56,8 @@ class BackwardNet(nn.Cell):
grads = self.grad(self.forward_net)(*inputs)
return grads
@pytest.mark.level1
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@@ -68,14 +69,11 @@ def test_forward():
context.set_context(mode=context.GRAPH_MODE)
graph_forward_net = ForwardNet(max_cycles=3)
graph_mode_out = graph_forward_net(x, y)
# Pynative Mode
context.set_context(mode=context.PYNATIVE_MODE)
pynative_forward_net = ForwardNet(max_cycles=3)
pynative_mode_out = pynative_forward_net(x, y)
assert graph_mode_out == pynative_mode_out
assert graph_mode_out == Tensor(np.array(9), mstype.int32)
@pytest.mark.level1
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@@ -88,13 +86,8 @@ def test_backward():
graph_forward_net = ForwardNet(max_cycles=3)
graph_backward_net = BackwardNet(graph_forward_net)
graph_mode_grads = graph_backward_net(x, y)
# Pynative Mode
context.set_context(mode=context.PYNATIVE_MODE)
pynative_forward_net = ForwardNet(max_cycles=3)
pynative_backward_net = BackwardNet(pynative_forward_net)
pynative_mode_grads = pynative_backward_net(x, y)
#expect = (Tensor(np.array(6), mstype.int32), Tensor(np.array(3), mstype.int32))
assert graph_mode_grads == pynative_mode_grads
assert graph_mode_grads == (Tensor(np.array(6), mstype.int32), Tensor(np.array(3), mstype.int32))
class ForwardNetNoAssign(nn.Cell):
@@ -116,6 +109,7 @@ class ForwardNetNoAssign(nn.Cell):
out = out - 20
return out, self.weight
class BackwardNetNoAssign(nn.Cell):
def __init__(self, net):
super(BackwardNetNoAssign, self).__init__(auto_prefix=False)
@@ -140,9 +134,5 @@ def test_backward_no_assign():
graph_forward_net = ForwardNetNoAssign(max_cycles=3)
graph_backward_net = BackwardNetNoAssign(graph_forward_net)
graph_mode_grads = graph_backward_net(x, y)
# Pynative Mode
context.set_context(mode=context.PYNATIVE_MODE)
pynative_forward_net = ForwardNetNoAssign(max_cycles=3)
pynative_backward_net = BackwardNetNoAssign(pynative_forward_net)
pynative_mode_grads = pynative_backward_net(x, y)
assert graph_mode_grads == pynative_mode_grads
assert graph_mode_grads == (Tensor(np.array(6), mstype.int32), Tensor(np.array(3), mstype.int32))

View File

@@ -20,7 +20,9 @@ from mindspore.common import dtype as mstype
from mindspore.common.parameter import Parameter
grad_all = C.GradOperation(get_all=True)
@pytest.mark.level1
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@@ -62,14 +64,5 @@ def test_if_after_for_in_if():
graph_forward_res = forward_net(x)
graph_backward_res = net(x)
# pynative mode
context.set_context(mode=context.PYNATIVE_MODE)
if_after_for_in_if_net = IfAfterForInIfNet()
net = GradNet(if_after_for_in_if_net)
forward_net = IfAfterForInIfNet()
pynative_forward_res = forward_net(x)
pynative_backward_res = net(x)
assert graph_forward_res == pynative_forward_res
assert graph_backward_res == pynative_backward_res
assert graph_forward_res == Tensor(0, mstype.int32)
assert graph_backward_res == (Tensor(1, mstype.int32),)

View File

@@ -21,6 +21,7 @@ from mindspore.common.parameter import Parameter
grad_all = C.GradOperation(get_all=True)
@pytest.mark.skip(reason="not supported for in while")
def test_if_after_for_in_while():
class IfAfterForInWhileNet(nn.Cell):

View File

@@ -20,7 +20,9 @@ from mindspore.common import dtype as mstype
from mindspore.common.parameter import Parameter
grad_all = C.GradOperation(get_all=True)
@pytest.mark.level1
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@@ -62,14 +64,5 @@ def test_if_after_for_in_for():
graph_forward_res = forward_net(x)
graph_backward_res = net(x)
# pynative mode
context.set_context(mode=context.PYNATIVE_MODE)
if_after_for_in_for_net = IfAfterForInForNet()
net = GradNet(if_after_for_in_for_net)
forward_net = IfAfterForInForNet()
pynative_forward_res = forward_net(x)
pynative_backward_res = net(x)
assert graph_forward_res == pynative_forward_res
assert graph_backward_res == pynative_backward_res
assert graph_forward_res == Tensor(12285, mstype.int32)
assert graph_backward_res == (Tensor(1025, mstype.int32),)

View File

@@ -42,7 +42,7 @@ class ForwardNet(nn.Cell):
out = x * y + out
i = i + 1
self.weight = i
return out, self.weight
return out
class BackwardNet(nn.Cell):
@@ -55,7 +55,8 @@ class BackwardNet(nn.Cell):
grads = self.grad(self.forward_net)(*inputs)
return grads
@pytest.mark.level1
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@@ -67,13 +68,10 @@ def test_forward():
context.set_context(mode=context.GRAPH_MODE)
graph_forward_net = ForwardNet(max_cycles=10)
graph_mode_out = graph_forward_net(x, y)
# Pynative Mode
context.set_context(mode=context.PYNATIVE_MODE)
pynative_forward_net = ForwardNet(max_cycles=10)
pynative_mode_out = pynative_forward_net(x, y)
assert graph_mode_out == pynative_mode_out
assert graph_mode_out == Tensor(np.array(30), mstype.int32)
@pytest.mark.level1
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@@ -86,9 +84,5 @@ def test_backward():
graph_forward_net = ForwardNet(max_cycles=10)
graph_backward_net = BackwardNet(graph_forward_net)
graph_mode_grads = graph_backward_net(x, y)
# Pynative Mode
context.set_context(mode=context.PYNATIVE_MODE)
pynative_forward_net = ForwardNet(max_cycles=10)
pynative_backward_net = BackwardNet(pynative_forward_net)
pynative_mode_grads = pynative_backward_net(x, y)
assert graph_mode_grads == pynative_mode_grads
assert graph_mode_grads == (Tensor(np.array(30), mstype.int32), Tensor(np.array(10), mstype.int32))

View File

@@ -53,7 +53,7 @@ class BackwardNet(nn.Cell):
return grads
@pytest.mark.level1
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@@ -65,12 +65,8 @@ def test_forward():
forward_net = ForwardNet(max_cycles=3)
graph_out = forward_net(x, y)
context.set_context(mode=context.PYNATIVE_MODE)
x = Tensor(np.array(1), mstype.int32)
y = Tensor(np.array(3), mstype.int32)
forward_net = ForwardNet(max_cycles=3)
pynative_out = forward_net(x, y)
assert graph_out == pynative_out
assert graph_out == Tensor(np.array(18), mstype.int32)
@pytest.mark.skip(reason="Ascend kernel compiler error!")
@pytest.mark.level1

View File

@@ -55,7 +55,7 @@ class BackwardNet(nn.Cell):
return grads
@pytest.mark.level1
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@@ -67,15 +67,10 @@ def test_forward():
forward_net = ForwardNet(max_cycles=3)
graph_out = forward_net(x, y)
context.set_context(mode=context.PYNATIVE_MODE)
x = Tensor(np.array(1), mstype.int32)
y = Tensor(np.array(3), mstype.int32)
forward_net = ForwardNet(max_cycles=3)
pynative_out = forward_net(x, y)
assert graph_out == pynative_out
assert graph_out == Tensor(np.array(18), mstype.int32)
@pytest.mark.level1
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@@ -88,10 +83,4 @@ def test_backward():
backward_net = BackwardNet(forward_net)
graph_grads = backward_net(x, y)
context.set_context(mode=context.PYNATIVE_MODE)
x = Tensor(np.array(1), mstype.int32)
y = Tensor(np.array(3), mstype.int32)
forward_net = ForwardNet(max_cycles=3)
backward_net = BackwardNet(forward_net)
pynative_grads = backward_net(x, y)
assert graph_grads == pynative_grads
assert graph_grads == Tensor(np.array(18), mstype.int32)

View File

@@ -43,7 +43,8 @@ class ForwardNet(nn.Cell):
self.weight = i
out = x * y + out
i = i + 1
return out, self.weight
out1 = self.weight + 1
return out, out1
class BackwardNet(nn.Cell):
@@ -56,7 +57,8 @@ class BackwardNet(nn.Cell):
grads = self.grad(self.forward_net)(*inputs)
return grads
@pytest.mark.level1
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@@ -68,13 +70,11 @@ def test_forward():
context.set_context(mode=context.GRAPH_MODE)
graph_forward_net = ForwardNet(max_cycles=10)
graph_mode_out = graph_forward_net(x, y)
# Pynative Mode
context.set_context(mode=context.PYNATIVE_MODE)
pynative_forward_net = ForwardNet(max_cycles=10)
pynative_mode_out = pynative_forward_net(x, y)
assert graph_mode_out == pynative_mode_out
@pytest.mark.level1
assert graph_mode_out == (Tensor(np.array(30), mstype.int32), Tensor(np.array(10), mstype.int32))
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@@ -87,9 +87,5 @@ def test_backward():
graph_forward_net = ForwardNet(max_cycles=10)
graph_backward_net = BackwardNet(graph_forward_net)
graph_mode_grads = graph_backward_net(x, y)
# Pynative Mode
context.set_context(mode=context.PYNATIVE_MODE)
pynative_forward_net = ForwardNet(max_cycles=10)
pynative_backward_net = BackwardNet(pynative_forward_net)
pynative_mode_grads = pynative_backward_net(x, y)
assert graph_mode_grads == pynative_mode_grads
assert graph_mode_grads == (Tensor(np.array(30), mstype.int32), Tensor(np.array(10), mstype.int32))

View File

@@ -46,7 +46,8 @@ class ForwardNet(nn.Cell):
self.weight = i
out = out + 10
i = i + 1
return out, self.weight
out1 = self.weight + 1
return out, out1
class BackwardNet(nn.Cell):
@@ -59,7 +60,8 @@ class BackwardNet(nn.Cell):
grads = self.grad(self.forward_net)(*inputs)
return grads
@pytest.mark.level1
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@@ -71,13 +73,11 @@ def test_forward():
context.set_context(mode=context.GRAPH_MODE)
graph_forward_net = ForwardNet(max_cycles=10)
graph_mode_out = graph_forward_net(x, y)
# Pynative Mode
context.set_context(mode=context.PYNATIVE_MODE)
pynative_forward_net = ForwardNet(max_cycles=10)
pynative_mode_out = pynative_forward_net(x, y)
assert graph_mode_out == pynative_mode_out
@pytest.mark.level1
assert graph_mode_out == (Tensor(np.array(121), mstype.int32), Tensor(np.array(10), mstype.int32))
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@@ -90,9 +90,5 @@ def test_backward():
graph_forward_net = ForwardNet(max_cycles=10)
graph_backward_net = BackwardNet(graph_forward_net)
graph_mode_grads = graph_backward_net(x, y)
# Pynative Mode
context.set_context(mode=context.PYNATIVE_MODE)
pynative_forward_net = ForwardNet(max_cycles=10)
pynative_backward_net = BackwardNet(pynative_forward_net)
pynative_mode_grads = pynative_backward_net(x, y)
assert graph_mode_grads == pynative_mode_grads
assert graph_mode_grads == (Tensor(np.array(21), mstype.int32), Tensor(np.array(7), mstype.int32))

View File

@@ -43,7 +43,8 @@ class ForwardNet(nn.Cell):
out = out + 10
i = i + 1
self.weight = self.weight - i
return out, self.weight
out1 = self.weight + 1
return out, out1
class BackwardNet(nn.Cell):
@@ -56,7 +57,8 @@ class BackwardNet(nn.Cell):
grads = self.grad(self.forward_net)(*inputs)
return grads
@pytest.mark.level1
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@ -68,13 +70,11 @@ def test_forward():
context.set_context(mode=context.GRAPH_MODE)
graph_forward_net = ForwardNet(max_cycles=3)
graph_mode_out = graph_forward_net(x, y)
# Pynative Mode
context.set_context(mode=context.PYNATIVE_MODE)
pynative_forward_net = ForwardNet(max_cycles=3)
pynative_mode_out = pynative_forward_net(x, y)
assert graph_mode_out == pynative_mode_out
@pytest.mark.level1
assert graph_mode_out == (Tensor(np.array(60), mstype.int32), Tensor(np.array(10), mstype.int32))
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@@ -87,12 +87,8 @@ def test_backward():
graph_forward_net = ForwardNet(max_cycles=3)
graph_backward_net = BackwardNet(graph_forward_net)
graph_mode_grads = graph_backward_net(x, y)
# Pynative Mode
context.set_context(mode=context.PYNATIVE_MODE)
pynative_forward_net = ForwardNet(max_cycles=3)
pynative_backward_net = BackwardNet(pynative_forward_net)
pynative_mode_grads = pynative_backward_net(x, y)
assert graph_mode_grads == pynative_mode_grads
assert graph_mode_grads == (Tensor(np.array(10), mstype.int32), Tensor(np.array(6), mstype.int32))
class ForwardNetNoAssign(nn.Cell):
@ -134,9 +130,5 @@ def test_backward_no_assign():
graph_forward_net = ForwardNetNoAssign(max_cycles=3)
graph_backward_net = BackwardNetNoAssign(graph_forward_net)
graph_mode_grads = graph_backward_net(x, y)
# Pynative Mode
context.set_context(mode=context.PYNATIVE_MODE)
pynative_forward_net = ForwardNetNoAssign(max_cycles=3)
pynative_backward_net = BackwardNetNoAssign(pynative_forward_net)
pynative_mode_grads = pynative_backward_net(x, y)
assert graph_mode_grads == pynative_mode_grads
assert graph_mode_grads == (Tensor(np.array(10), mstype.int32), Tensor(np.array(6), mstype.int32))

View File

@ -36,7 +36,7 @@ class ForwardNet(nn.Cell):
def construct(self, x, y):
out = self.zero
i = self.i
if x > y:
if x < y:
while i < self.max_cycles:
out = x * y + out
i = i + 1
@ -44,7 +44,8 @@ class ForwardNet(nn.Cell):
while out > 20:
self.weight = out
out = out - 20
return out, self.weight
out1 = self.weight + 1
return out, out1
class BackwardNet(nn.Cell):
@ -57,7 +58,8 @@ class BackwardNet(nn.Cell):
grads = self.grad(self.forward_net)(*inputs)
return grads
@pytest.mark.level1
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@ -69,12 +71,11 @@ def test_forward():
# Graph Mode
context.set_context(mode=context.GRAPH_MODE)
graph_mode_out = forward_net(x, y)
# Pynative Mode
context.set_context(mode=context.PYNATIVE_MODE)
pynative_mode_out = forward_net(x, y)
assert graph_mode_out == pynative_mode_out
@pytest.mark.level1
assert graph_mode_out == (Tensor(np.array(9), mstype.int32), Tensor(np.array(4), mstype.int32))
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@ -87,7 +88,5 @@ def test_backward():
# Graph Mode
context.set_context(mode=context.GRAPH_MODE)
graph_mode_grads = backward_net(x, y)
# Pynative Mode
context.set_context(mode=context.PYNATIVE_MODE)
pynative_mode_grads = backward_net(x, y)
assert graph_mode_grads == pynative_mode_grads
assert graph_mode_grads == (Tensor(np.array(9), mstype.int32), Tensor(np.array(3), mstype.int32))
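Since the expected tuples are now hard-coded, they have to be derived offline. One low-tech way is a throwaway plain-Python mirror of construct; a sketch under assumed values (the initial weight and the concrete x, y, max_cycles here are placeholders, not values taken from this commit):

def forward_reference(x, y, max_cycles, weight=0):
    # Plain-Python mirror of the if/while construct above; the initial
    # weight value is an assumption for illustration.
    out, i = 0, 0
    if x < y:
        while i < max_cycles:
            out = x * y + out
            i += 1
    while out > 20:
        weight = out
        out = out - 20
    return out, weight + 1

print(forward_reference(1, 3, 3))  # placeholder inputs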

View File

@ -58,7 +58,7 @@ class BackwardNet(nn.Cell):
return grads
@pytest.mark.level1
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@ -70,15 +70,10 @@ def test_forward():
forward_net = ForwardNet(max_cycles=3)
graph_out = forward_net(x, y)
context.set_context(mode=context.PYNATIVE_MODE)
x = Tensor(np.array(1), mstype.int32)
y = Tensor(np.array(3), mstype.int32)
forward_net = ForwardNet(max_cycles=3)
pynative_out = forward_net(x, y)
assert graph_out == pynative_out
assert graph_out == Tensor(np.array(36), mstype.int32)
@pytest.mark.level1
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@ -91,10 +86,4 @@ def test_backward():
backward_net = BackwardNet(forward_net)
graph_grads = backward_net(x, y)
context.set_context(mode=context.PYNATIVE_MODE)
x = Tensor(np.array(1), mstype.int32)
y = Tensor(np.array(3), mstype.int32)
forward_net = ForwardNet(max_cycles=3)
backward_net = BackwardNet(forward_net)
pynative_grads = backward_net(x, y)
assert graph_grads == pynative_grads
assert graph_grads == Tensor(np.array(36), mstype.int32)

View File

@ -56,7 +56,7 @@ class BackwardNet(nn.Cell):
return grads
@pytest.mark.level1
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@ -68,15 +68,10 @@ def test_forward():
forward_net = ForwardNet(max_cycles=3)
graph_out = forward_net(x, y)
context.set_context(mode=context.PYNATIVE_MODE)
x = Tensor(np.array(1), mstype.int32)
y = Tensor(np.array(3), mstype.int32)
forward_net = ForwardNet(max_cycles=3)
pynative_out = forward_net(x, y)
assert graph_out == pynative_out
assert graph_out == Tensor(np.array(36), mstype.int32)
@pytest.mark.level1
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@ -89,10 +84,4 @@ def test_backward():
backward_net = BackwardNet(forward_net)
graph_grads = backward_net(x, y)
context.set_context(mode=context.PYNATIVE_MODE)
x = Tensor(np.array(1), mstype.int32)
y = Tensor(np.array(3), mstype.int32)
forward_net = ForwardNet(max_cycles=3)
backward_net = BackwardNet(forward_net)
pynative_grads = backward_net(x, y)
assert graph_grads == pynative_grads
assert graph_grads == Tensor(np.array(36), mstype.int32)

View File

@ -34,14 +34,15 @@ class ForwardNet(nn.Cell):
def construct(self, x, y):
out = self.zero
if x > y:
if x < y:
for _ in range(0, self.max_cycles):
self.weight = out
out = x * y + out
while out > 20:
self.weight = out
out = out - 20
return out, self.weight
out1 = self.weight + 1
return out, out1
class BackwardNet(nn.Cell):
@ -54,7 +55,8 @@ class BackwardNet(nn.Cell):
grads = self.grad(self.forward_net)(*inputs)
return grads
@pytest.mark.level1
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@ -66,13 +68,11 @@ def test_forward():
context.set_context(mode=context.GRAPH_MODE)
graph_forward_net = ForwardNet(max_cycles=3)
graph_mode_out = graph_forward_net(x, y)
# Pynative Mode
context.set_context(mode=context.PYNATIVE_MODE)
pynative_forward_net = ForwardNet(max_cycles=3)
pynative_mode_out = pynative_forward_net(x, y)
assert graph_mode_out == pynative_mode_out
@pytest.mark.level1
assert graph_mode_out == (Tensor(np.array(9), mstype.int32), Tensor(np.array(7), mstype.int32))
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@ -85,9 +85,5 @@ def test_backward():
graph_forward_net = ForwardNet(max_cycles=3)
graph_backward_net = BackwardNet(graph_forward_net)
graph_mode_grads = graph_backward_net(x, y)
# Pynative Mode
context.set_context(mode=context.PYNATIVE_MODE)
pynative_forward_net = ForwardNet(max_cycles=3)
pynative_backward_net = BackwardNet(pynative_forward_net)
pynative_mode_grads = pynative_backward_net(x, y)
assert graph_mode_grads == pynative_mode_grads
assert graph_mode_grads == (Tensor(np.array(9), mstype.int32), Tensor(np.array(3), mstype.int32))

View File

@ -55,6 +55,7 @@ class BackwardNet(nn.Cell):
grads = self.grad(self.forward_net)(*inputs)
return grads
@pytest.mark.skip(reason="not supported for in while")
def test_forward():
x = Tensor(np.array(1), mstype.int32)
@ -63,6 +64,7 @@ def test_forward():
out = forward_net(x, y)
print("forward out:", out)
@pytest.mark.skip(reason="not supported for in while")
def test_backward():
x = Tensor(np.array(1), mstype.int32)
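Both tests in this file are parked behind pytest.mark.skip rather than deleted, so the unsupported construct stays documented in CI output. A generic sketch (the test body is a placeholder):

import pytest

@pytest.mark.skip(reason="for loop in while loop not supported")
def test_placeholder():
    # Never executed while the marker is present; `pytest -rs` prints
    # the reason string in the run summary.
    assert True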

View File

@ -55,7 +55,7 @@ class BackwardNet(nn.Cell):
return grads
@pytest.mark.level1
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@ -67,12 +67,8 @@ def test_forward():
forward_net = ForwardNet(max_cycles=3)
graph_out = forward_net(x, y)
context.set_context(mode=context.PYNATIVE_MODE)
x = Tensor(np.array(1), mstype.int32)
y = Tensor(np.array(3), mstype.int32)
forward_net = ForwardNet(max_cycles=3)
pynative_out = forward_net(x, y)
assert graph_out == pynative_out
assert graph_out == Tensor(np.array(36), mstype.int32)
@pytest.mark.skip(reason="Ascend kernel compiler error!")
@pytest.mark.level1

View File

@ -20,7 +20,9 @@ from mindspore.common import dtype as mstype
from mindspore.common.parameter import Parameter
grad_all = C.GradOperation(get_all=True)
@pytest.mark.level1
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@ -61,14 +63,5 @@ def test_for_after_if():
graph_forward_res = forward_net(x)
graph_backward_res = net(x)
# pynative mode
context.set_context(mode=context.PYNATIVE_MODE)
for_after_if_net = ForAfterIfNet()
net = GradNet(for_after_if_net)
forward_net = ForAfterIfNet()
pynative_forward_res = forward_net(x)
pynative_backward_res = net(x)
assert graph_forward_res == pynative_forward_res
assert graph_backward_res == pynative_backward_res
assert graph_forward_res == Tensor(150, mstype.int32)
assert graph_backward_res == (Tensor(5, mstype.int32),)
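Note that the backward expectation is a one-element tuple: with C.GradOperation(get_all=True), gradients always come back as a tuple over the network inputs, even when there is a single input. A self-contained sketch with a toy cell (not from this commit):

import mindspore.nn as nn
from mindspore import Tensor, context
from mindspore.ops import composite as C
from mindspore.common import dtype as mstype

context.set_context(mode=context.GRAPH_MODE)
grad_all = C.GradOperation(get_all=True)

class Square(nn.Cell):
    def construct(self, x):
        return x * x

grads = grad_all(Square())(Tensor(3.0, mstype.float32))
assert isinstance(grads, tuple) and len(grads) == 1  # single input -> 1-tuple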

View File

@ -53,7 +53,7 @@ class BackwardNet(nn.Cell):
return grads
@pytest.mark.level1
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@ -65,15 +65,10 @@ def test_forward():
forward_net = ForwardNet(max_cycles=3)
graph_out = forward_net(x, y)
context.set_context(mode=context.PYNATIVE_MODE)
x = Tensor(np.array(1), mstype.int32)
y = Tensor(np.array(3), mstype.int32)
forward_net = ForwardNet(max_cycles=3)
pynative_out = forward_net(x, y)
assert graph_out == pynative_out
assert graph_out == Tensor(np.array(18), mstype.int32)
@pytest.mark.level1
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@ -86,10 +81,4 @@ def test_backward():
backward_net = BackwardNet(forward_net)
graph_grads = backward_net(x, y)
context.set_context(mode=context.PYNATIVE_MODE)
x = Tensor(np.array(1), mstype.int32)
y = Tensor(np.array(3), mstype.int32)
forward_net = ForwardNet(max_cycles=3)
backward_net = BackwardNet(forward_net)
pynative_grads = backward_net(x, y)
assert graph_grads == pynative_grads
assert graph_grads == Tensor(np.array(18), mstype.int32)

View File

@ -22,6 +22,8 @@ from mindspore.ops import operations as P
from mindspore.common import dtype as mstype
grad_all = C.GradOperation(get_all=True)
@pytest.mark.level1
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_arm_ascend_training
@ -74,17 +76,9 @@ def test_for_after_for_01():
graph_forward_res = forward_net(x)
graph_backward_res = net(x)
# pynative mode
context.set_context(mode=context.PYNATIVE_MODE)
for_after_for_net = ForAfterForNet()
net = GradNet(for_after_for_net)
assert graph_forward_res == Tensor([8], mstype.float32)
assert graph_backward_res == (Tensor([4], mstype.int32),)
forward_net = ForAfterForNet()
pynative_forward_res = forward_net(x)
pynative_backward_res = net(x)
assert graph_forward_res == pynative_forward_res
assert graph_backward_res == pynative_backward_res
@pytest.mark.level1
@pytest.mark.platform_x86_gpu_training
@ -136,13 +130,5 @@ def test_for_after_for_02():
graph_backward_res = net(x)
# pynative mode
context.set_context(mode=context.PYNATIVE_MODE)
for_after_for_net = ForAfterForNet()
net = GradNet(for_after_for_net)
forward_net = ForAfterForNet()
pynative_forward_res = forward_net(x)
pynative_backward_res = net(x)
assert graph_forward_res == pynative_forward_res
assert graph_backward_res == pynative_backward_res
assert graph_forward_res == Tensor([1620], mstype.int32)
assert graph_backward_res == (Tensor([20], mstype.int32),)

View File

@ -20,7 +20,11 @@ from mindspore.common import dtype as mstype
from mindspore.common.parameter import Parameter
grad_all = C.GradOperation(get_all=True)
@pytest.mark.level1
@pytest.mark.skip(
reason="GPU backward result error! Maybe caused by a redundant graph.")  # Ascend passes because it runs with multi-graph sink.
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@ -41,7 +45,7 @@ def test_for_after_if_in_if():
x += self.param_a
self.param_b += 2
for _ in range(0, 5):
x += self.param_b
out += self.param_b
out *= x
return out
@ -64,14 +68,55 @@ def test_for_after_if_in_if():
graph_forward_res = forward_net(x)
graph_backward_res = net(x)
# pynative mode
context.set_context(mode=context.PYNATIVE_MODE)
assert graph_forward_res == Tensor(715, mstype.int32)
assert graph_backward_res == (Tensor(55, mstype.int32),)
@pytest.mark.skip(reason="Ascend vm backward result error!Maybe redudunt graph cause.")
@pytest.mark.level0
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@pytest.mark.env_onecard
def test_for_after_if_in_if_in_vm():
class ForAfterIfInIfNet(nn.Cell):
def __init__(self):
super().__init__()
self.param_a = Parameter(Tensor(5, mstype.int32), name='a')
self.param_b = Parameter(Tensor(4, mstype.int32), name='b')
def construct(self, x):
out = self.param_a
while x < 0:
x += 1
if self.param_a > self.param_b:
x += 3
if x > self.param_a:
self.param_b += 4
x += self.param_a
self.param_b += 2
for _ in range(0, 5):
out += self.param_b
out *= x
return out
class GradNet(nn.Cell):
def __init__(self, net):
super(GradNet, self).__init__()
self.net = net
def construct(self, *inputs):
return grad_all(self.net)(*inputs)
x = Tensor(5, mstype.int32)
# graph mode
context.set_context(mode=context.GRAPH_MODE)
for_after_if_in_if_net = ForAfterIfInIfNet()
net = GradNet(for_after_if_in_if_net)
forward_net = ForAfterIfInIfNet()
pynative_forward_res = forward_net(x)
pynative_backward_res = net(x)
graph_forward_res = forward_net(x)
graph_backward_res = net(x)
assert graph_forward_res == pynative_forward_res
assert graph_backward_res == pynative_backward_res
assert graph_forward_res == Tensor(715, mstype.int32)
assert graph_backward_res == (Tensor(55, mstype.int32),)
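The two constants check out by tracing construct by hand; a throwaway plain-Python mirror makes the arithmetic explicit (same logic as the cell above, outside MindSpore):

def forward_reference(x, param_a=5, param_b=4):
    out = param_a
    while x < 0:
        x += 1
    if param_a > param_b:        # 5 > 4, taken
        x += 3                   # x: 5 -> 8
    if x > param_a:              # 8 > 5, taken
        param_b += 4             # param_b: 4 -> 8
    x += param_a                 # x: 8 -> 13
    param_b += 2                 # param_b: 8 -> 10
    for _ in range(0, 5):
        out += param_b           # out: 5 -> 55
    out *= x                     # out: 55 * 13 = 715
    return out

assert forward_reference(5) == 715
# d(out)/dx = param_a + 5 * param_b = 55 at these values, matching (55,).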

View File

@ -69,12 +69,7 @@ def test_forward():
forward_net = ForwardNet(max_cycles=3)
graph_out = forward_net(x, y)
context.set_context(mode=context.PYNATIVE_MODE)
x = Tensor(np.array(1), mstype.int32)
y = Tensor(np.array(3), mstype.int32)
forward_net = ForwardNet(max_cycles=3)
pynative_out = forward_net(x, y)
assert graph_out == pynative_out
assert graph_out == Tensor(np.array(13), mstype.int32)
@pytest.mark.level1
@ -90,10 +85,4 @@ def test_backward():
backward_net = BackwardNet(forward_net)
graph_grads = backward_net(x, y)
context.set_context(mode=context.PYNATIVE_MODE)
x = Tensor(np.array(1), mstype.int32)
y = Tensor(np.array(3), mstype.int32)
forward_net = ForwardNet(max_cycles=3)
backward_net = BackwardNet(forward_net)
pynative_grads = backward_net(x, y)
assert graph_grads == pynative_grads
assert graph_grads == Tensor(np.array(12), mstype.int32)

View File

@ -22,7 +22,9 @@ from mindspore.ops import operations as P
from mindspore.common import dtype as mstype
grad_all = C.GradOperation(get_all=True)
@pytest.mark.level1
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@ -87,19 +89,11 @@ def test_for_after_while_in_if_01():
graph_forward_res = forward_net(x, y)
graph_backward_res = net(x, y)
# pynative mode
context.set_context(mode=context.PYNATIVE_MODE)
for_after_while_in_if_net = ForAfterWhileInIfNet()
net = GradNet(for_after_while_in_if_net)
assert graph_forward_res == Tensor([0], mstype.float32)
assert graph_backward_res == (Tensor([0], mstype.int32), Tensor([0], mstype.int32))
forward_net = ForAfterWhileInIfNet()
pynative_forward_res = forward_net(x, y)
pynative_backward_res = net(x, y)
assert graph_forward_res == pynative_forward_res
assert graph_backward_res == pynative_backward_res
@pytest.mark.level1
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@ -119,7 +113,6 @@ def test_for_after_while_in_if_02():
param_c = np.full((1,), 11, dtype=np.int32)
self.param_c = Parameter(Tensor(param_c), name='c')
def construct(self, x, y):
self.assign(self.param_a, x + self.param_a)
y = self.add(y, self.param_b)
@ -155,14 +148,5 @@ def test_for_after_while_in_if_02():
graph_forward_res = forward_net(x, y)
graph_backward_res = net(x, y)
# pynative mode
context.set_context(mode=context.PYNATIVE_MODE)
for_after_while_in_if_net = ForAfterWhileInIfNet()
net = GradNet(for_after_while_in_if_net)
forward_net = ForAfterWhileInIfNet()
pynative_forward_res = forward_net(x, y)
pynative_backward_res = net(x, y)
assert graph_forward_res == pynative_forward_res
assert graph_backward_res == pynative_backward_res
assert graph_forward_res == Tensor([126], mstype.int32)
assert graph_backward_res == (Tensor([0], mstype.int32), Tensor([0], mstype.int32))

View File

@ -57,6 +57,7 @@ class BackwardNet(nn.Cell):
grads = self.grad(self.forward_net)(*inputs)
return grads
@pytest.mark.level1
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_arm_ascend_training
@ -69,12 +70,8 @@ def test_forward():
forward_net = ForwardNet(max_cycles=3)
graph_out = forward_net(x, y)
context.set_context(mode=context.PYNATIVE_MODE)
x = Tensor(np.array(1), mstype.int32)
y = Tensor(np.array(3), mstype.int32)
forward_net = ForwardNet(max_cycles=3)
pynative_out = forward_net(x, y)
assert graph_out == pynative_out
assert graph_out == Tensor(np.array(36), mstype.int32)
@pytest.mark.level1
@pytest.mark.platform_x86_gpu_training
@ -89,10 +86,4 @@ def test_backward():
backward_net = BackwardNet(forward_net)
graph_grads = backward_net(x, y)
context.set_context(mode=context.PYNATIVE_MODE)
x = Tensor(np.array(1), mstype.int32)
y = Tensor(np.array(3), mstype.int32)
forward_net = ForwardNet(max_cycles=3)
backward_net = BackwardNet(forward_net)
pynative_grads = backward_net(x, y)
assert graph_grads == pynative_grads
assert graph_grads == Tensor(np.array(36), mstype.int32)

View File

@ -22,7 +22,9 @@ from mindspore.ops import operations as P
from mindspore.common import dtype as mstype
grad_all = C.GradOperation(get_all=True)
@pytest.mark.level1
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@ -88,19 +90,11 @@ def test_for_after_while_in_for_01():
graph_forward_res = forward_net(x, y)
graph_backward_res = net(x, y)
# pynative mode
context.set_context(mode=context.PYNATIVE_MODE)
for_after_while_in_for_net = ForAfterWhileInForNet()
net = GradNet(for_after_while_in_for_net)
assert graph_forward_res == Tensor([12], mstype.float32)
assert graph_backward_res == (Tensor([0], mstype.int32), Tensor([0], mstype.int32))
forward_net = ForAfterWhileInForNet()
pynative_forward_res = forward_net(x, y)
pynative_backward_res = net(x, y)
assert graph_forward_res == pynative_forward_res
assert graph_backward_res == pynative_backward_res
@pytest.mark.level1
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@ -156,14 +150,5 @@ def test_for_after_while_in_for_02():
graph_forward_res = forward_net(x, y)
graph_backward_res = net(x, y)
# pynative mode
context.set_context(mode=context.PYNATIVE_MODE)
for_after_while_in_for_net = ForAfterWhileInForNet()
net = GradNet(for_after_while_in_for_net)
forward_net = ForAfterWhileInForNet()
pynative_forward_res = forward_net(x, y)
pynative_backward_res = net(x, y)
assert graph_forward_res == pynative_forward_res
assert graph_backward_res == pynative_backward_res
assert graph_forward_res == Tensor([-1020], mstype.int32)
assert graph_backward_res == (Tensor([0], mstype.int32), Tensor([0], mstype.int32))

View File

@ -21,6 +21,7 @@ from mindspore.common.parameter import Parameter
grad_all = C.GradOperation(get_all=True)
@pytest.mark.skip(reason="not supported side effect")
@pytest.mark.level1
@pytest.mark.platform_x86_gpu_training

View File

@ -22,6 +22,8 @@ from mindspore.ops import operations as P
from mindspore.common import dtype as mstype
grad_all = C.GradOperation(get_all=True)
@pytest.mark.skip(reason="not supported for in while")
def test_for_after_for_in_while_01():
class ForAfterForInWhileNet(nn.Cell):
@ -91,6 +93,7 @@ def test_for_after_for_in_while_01():
assert graph_forward_res == pynative_forward_res
assert graph_backward_res == pynative_backward_res
@pytest.mark.skip(reason="not supported for in while")
def test_for_after_for_in_while_02():
class ForAfterForInWhileNet(nn.Cell):

View File

@ -23,7 +23,8 @@ from mindspore.common import dtype as mstype
grad_all = C.GradOperation(get_all=True)
@pytest.mark.level1
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@ -80,19 +81,11 @@ def test_for_after_for_in_for_01():
graph_forward_res = forward_net(x, y)
graph_backward_res = net(x, y)
# pynative mode
context.set_context(mode=context.PYNATIVE_MODE)
for_after_for_in_for_net = ForAfterForInForNet()
net = GradNet(for_after_for_in_for_net)
assert graph_forward_res == Tensor([71], mstype.float32)
assert graph_backward_res == (Tensor([0], mstype.int32), Tensor([1], mstype.int32))
forward_net = ForAfterForInForNet()
pynative_forward_res = forward_net(x, y)
pynative_backward_res = net(x, y)
assert graph_forward_res == pynative_forward_res
assert graph_backward_res == pynative_backward_res
@pytest.mark.level1
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@ -145,14 +138,5 @@ def test_for_after_for_in_for_02():
graph_forward_res = forward_net(x, y)
graph_backward_res = net(x, y)
# pynative mode
context.set_context(mode=context.PYNATIVE_MODE)
for_after_for_in_for_net = ForAfterForInForNet()
net = GradNet(for_after_for_in_for_net)
forward_net = ForAfterForInForNet()
pynative_forward_res = forward_net(x, y)
pynative_backward_res = net(x, y)
assert graph_forward_res == pynative_forward_res
assert graph_backward_res == pynative_backward_res
assert graph_forward_res == Tensor([240], mstype.int32)
assert graph_backward_res == (Tensor([0], mstype.int32), Tensor([1], mstype.int32))

View File

@ -64,14 +64,8 @@ def test_while_grad():
while_net = MyWhileNet()
net = GradNet(while_net)
graph_output = net(idx, end, x)
# pynative mode
context.set_context(mode=context.PYNATIVE_MODE)
while_net = MyWhileNet()
net = GradNet(while_net)
pynative_output = net(idx, end, x)
assert np.allclose(graph_output[0].asnumpy(), pynative_output[0].asnumpy(), 0.0001, 0.0001)
assert np.allclose(graph_output[1].asnumpy(), pynative_output[1].asnumpy(), 0.0001, 0.0001)
assert np.allclose(graph_output[2].asnumpy(), pynative_output[2].asnumpy(), 0.0001, 0.0001)
assert graph_output == 0
@pytest.mark.level0
@ -210,11 +204,9 @@ def test_while_endless_case():
context.set_context(mode=context.GRAPH_MODE)
net = MyWhileNet()
graph_output = net(idx, end, x)
# pynative mode
context.set_context(mode=context.PYNATIVE_MODE)
net = MyWhileNet()
pynative_output = net(idx, end, x)
assert np.allclose(graph_output.asnumpy(), pynative_output.asnumpy(), 0.0001, 0.0001)
expect = np.array([[[4, 6], [8, 10]],
[[4, 6], [8, 10]]]).astype(np.float32)
assert np.allclose(graph_output.asnumpy(), expect, 0.0001, 0.0001)
@pytest.mark.level1
@ -284,18 +276,16 @@ def test_while_with_param_forward_with_const_branch():
idx = Tensor(np.array(0), dtype=ms.int32)
end = Tensor(np.array(4), dtype=ms.int32)
x = Tensor(np.random.randn(2, 2, 2).astype(np.float32), dtype=ms.float32)
x = Tensor(np.arange(8).reshape(2, 2, 2).astype(np.float32), dtype=ms.float32)
# graph mode
context.set_context(mode=context.GRAPH_MODE)
while_net = MyWhileNet()
net = while_net
graph_output = net(idx, end, x)
# pynative mode
context.set_context(mode=context.PYNATIVE_MODE)
while_net = MyWhileNet()
net = while_net
pynative_output = net(idx, end, x)
assert np.allclose(graph_output.asnumpy(), pynative_output.asnumpy(), 0.0001, 0.0001)
expect = np.array([[[0, 4], [8, 12]],
[[16, 20], [24, 28]]]).astype(np.float32)
assert np.allclose(graph_output.asnumpy(), expect, 0.0001, 0.0001)
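Swapping np.random.randn for np.arange is what makes the new fixed expectations possible: the input becomes a known ramp. A quick sketch; the expectation above is exactly 4 * x, consistent with one addition of x per loop iteration:

import numpy as np
import mindspore as ms
from mindspore import Tensor

# Deterministic ramp input: values 0..7 in a (2, 2, 2) block.
x = Tensor(np.arange(8).reshape(2, 2, 2).astype(np.float32), dtype=ms.float32)
print(x.asnumpy() * 4)  # reproduces the expect array used above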
@pytest.mark.level1
@ -339,15 +329,16 @@ def test_while_opt_endless():
while_net = MyWhileNet()
net = GradNet(while_net)
graph_output = net(idx, end, x)
# pynative mode
context.set_context(mode=context.PYNATIVE_MODE)
while_net = MyWhileNet()
net = GradNet(while_net)
pynative_output = net(idx, end, x)
assert np.allclose(graph_output[0].asnumpy(), pynative_output[0].asnumpy(), 0.0001, 0.0001)
expect1 = 0
expect2 = 0
expect3 = np.array([[[16, 16], [16, 16]],
[[16, 16], [16, 16]]]).astype(np.float32)
assert np.allclose(graph_output[0].asnumpy(), expect1, 0.0001, 0.0001)
assert np.allclose(graph_output[1].asnumpy(), expect2, 0.0001, 0.0001)
assert np.allclose(graph_output[2].asnumpy(), expect3, 0.0001, 0.0001)
@pytest.mark.skip(reason="not supported yet")
@pytest.mark.level0
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@ -371,18 +362,16 @@ def test_no_while_call():
idx = Tensor(np.array(0), dtype=ms.int32)
end = Tensor(np.array(4), dtype=ms.int32)
x = Tensor(np.random.randn(2, 2, 2).astype(np.float32), dtype=ms.float32)
x = Tensor(np.arange(8).reshape(2, 2, 2).astype(np.float32), dtype=ms.float32)
# graph mode
context.set_context(mode=context.GRAPH_MODE)
while_net = MyWhileNet()
net = while_net
graph_output = net(idx, end, x)
# pynative mode
context.set_context(mode=context.PYNATIVE_MODE)
while_net = MyWhileNet()
net = while_net
pynative_output = net(idx, end, x)
assert np.allclose(graph_output.asnumpy(), pynative_output.asnumpy(), 0.0001, 0.0001)
expect = np.array([[[0, 1], [2, 3]],
[[4, 5], [6, 7]]]).astype(np.float32)
assert np.allclose(graph_output.asnumpy(), expect, 0.0001, 0.0001)
@pytest.mark.level0
@ -419,21 +408,18 @@ def test_while_with_param_grad_with_const_branch():
idx = Tensor(np.array(0), dtype=ms.int32)
end = Tensor(np.array(4), dtype=ms.int32)
x = Tensor(np.random.randn(2, 2, 2).astype(np.float32), dtype=ms.float32)
x = Tensor(np.arange(8).reshape(2, 2, 2).astype(np.float32), dtype=ms.float32)
# graph mode
context.set_context(mode=context.GRAPH_MODE)
while_net = MyWhileNet()
net = GradNet(while_net)
graph_output = net(idx, end, x)
# pynative mode
context.set_context(mode=context.PYNATIVE_MODE)
while_net = MyWhileNet()
net = GradNet(while_net)
pynative_output = net(idx, end, x)
assert np.allclose(graph_output[0].asnumpy(), pynative_output[0].asnumpy(), 0.0001, 0.0001)
expect = np.array([[[4, 4], [4, 4]],
[[4, 4], [4, 4]]]).astype(np.float32)
assert np.allclose(graph_output[0].asnumpy(), expect, 0.0001, 0.0001)
@pytest.mark.skip(reason="not supported yet")
@pytest.mark.level0
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@ -471,18 +457,16 @@ def test_for_while_with_param_grad_with_const_branch():
idx = Tensor(np.array(0), dtype=ms.int32)
end = Tensor(np.array(4), dtype=ms.int32)
x = Tensor(np.random.randn(2, 2, 2).astype(np.float32), dtype=ms.float32)
x = Tensor(np.arange(8).reshape(2, 2, 2).astype(np.float32), dtype=ms.float32)
# graph mode
context.set_context(mode=context.GRAPH_MODE)
while_net = MyWhileNet()
net = GradNet(while_net)
graph_output = net(idx, end, x)
# pynative mode
context.set_context(mode=context.PYNATIVE_MODE)
while_net = MyWhileNet()
net = GradNet(while_net)
pynative_output = net(idx, end, x)
assert np.allclose(graph_output[0].asnumpy(), pynative_output[0].asnumpy(), 0.0001, 0.0001)
expect = np.array([[[8, 8], [8, 8]],
[[8, 8], [8, 8]]]).astype(np.float32)
assert np.allclose(graph_output[0].asnumpy(), expect, 0.0001, 0.0001)
@pytest.mark.level0
@ -519,18 +503,15 @@ def test_for_while_with_param_grad_basic():
idx = Tensor(np.array(0), dtype=ms.int32)
end = Tensor(np.array(4), dtype=ms.int32)
x = Tensor(np.random.randn(2, 2, 2).astype(np.float32), dtype=ms.float32)
x = Tensor(np.arange(8).reshape(2, 2, 2).astype(np.float32), dtype=ms.float32)
# graph mode
context.set_context(mode=context.GRAPH_MODE)
while_net = MyWhileNet()
net = GradNet(while_net)
graph_output = net(idx, end, x)
# pynative mode
context.set_context(mode=context.PYNATIVE_MODE)
while_net = MyWhileNet()
net = GradNet(while_net)
pynative_output = net(idx, end, x)
assert np.allclose(graph_output[0].asnumpy(), pynative_output[0].asnumpy(), 0.0001, 0.0001)
expect = np.array([[[8, 8], [8, 8]],
[[8, 8], [8, 8]]]).astype(np.float32)
assert np.allclose(graph_output[0].asnumpy(), expect, 0.0001, 0.0001)
@pytest.mark.level0
@ -567,18 +548,15 @@ def test_for_while_with_param_grad_normal():
idx = Tensor(np.array(0), dtype=ms.int32)
end = Tensor(np.array(4), dtype=ms.int32)
x = Tensor(np.random.randn(2, 2, 2).astype(np.float32), dtype=ms.float32)
x = Tensor(np.arange(8).reshape(2, 2, 2).astype(np.float32), dtype=ms.float32)
# graph mode
context.set_context(mode=context.GRAPH_MODE)
while_net = MyWhileNet()
net = GradNet(while_net)
graph_output = net(idx, end, x)
# pynative mode
context.set_context(mode=context.PYNATIVE_MODE)
while_net = MyWhileNet()
net = GradNet(while_net)
pynative_output = net(idx, end, x)
assert np.allclose(graph_output[0].asnumpy(), pynative_output[0].asnumpy(), 0.0001, 0.0001)
expect = np.array([[[8, 8], [8, 8]],
[[8, 8], [8, 8]]]).astype(np.float32)
assert np.allclose(graph_output[0].asnumpy(), expect, 0.0001, 0.0001)
@pytest.mark.level0
@ -612,18 +590,15 @@ def test_while_with_param_basic_grad():
idx = Tensor(np.array(0), dtype=ms.int32)
end = Tensor(np.array(3), dtype=ms.int32)
x = Tensor(np.random.randn(2, 2, 2).astype(np.float32), dtype=ms.float32)
x = Tensor(np.arange(8).reshape(2, 2, 2).astype(np.float32), dtype=ms.float32)
# graph mode
context.set_context(mode=context.GRAPH_MODE)
while_net = MyWhileNet()
net = GradNet(while_net)
graph_output = net(idx, end, x)
# pynative mode
context.set_context(mode=context.PYNATIVE_MODE)
while_net = MyWhileNet()
net = GradNet(while_net)
pynative_output = net(idx, end, x)
assert np.allclose(graph_output[0].asnumpy(), pynative_output[0].asnumpy(), 0.0001, 0.0001)
expect = np.array([[[4, 4], [4, 4]],
[[4, 4], [4, 4]]]).astype(np.float32)
assert np.allclose(graph_output[0].asnumpy(), expect, 0.0001, 0.0001)
@pytest.mark.level1
@ -657,18 +632,15 @@ def test_while_with_param_basic_grad_mul():
idx = Tensor(np.array(0), dtype=ms.int32)
end = Tensor(np.array(3), dtype=ms.int32)
x = Tensor(np.random.randn(2, 2, 2).astype(np.float32), dtype=ms.float32)
x = Tensor(np.arange(8).reshape(2, 2, 2).astype(np.float32), dtype=ms.float32)
# graph mode
context.set_context(mode=context.GRAPH_MODE)
while_net = MyWhileNet()
net = GradNet(while_net)
graph_output = net(idx, end, x)
# pynative mode
context.set_context(mode=context.PYNATIVE_MODE)
while_net = MyWhileNet()
net = GradNet(while_net)
pynative_output = net(idx, end, x)
assert np.allclose(graph_output[0].asnumpy(), pynative_output[0].asnumpy(), 0.0001, 0.0001)
expect = np.array([[[1, 4], [13, 28]],
[[49, 76], [109, 148]]]).astype(np.float32)
assert np.allclose(graph_output[0].asnumpy(), expect, 0.0001, 0.0001)
@pytest.mark.level0
@ -703,19 +675,19 @@ def test_while_with_param_basic_grad_two():
idx = Tensor(np.array(0), dtype=ms.int32)
end = Tensor(np.array(3), dtype=ms.int32)
x = Tensor(np.random.randn(2, 2, 2).astype(np.float32), dtype=ms.float32)
x = Tensor(np.arange(8).reshape(2, 2, 2).astype(np.float32), dtype=ms.float32)
# graph mode
context.set_context(mode=context.GRAPH_MODE)
while_net = MyWhileNet()
net = GradNet(while_net)
graph_output = net(idx, end, x)
# pynative mode
context.set_context(mode=context.PYNATIVE_MODE)
while_net = MyWhileNet()
net = GradNet(while_net)
pynative_output = net(idx, end, x)
assert np.allclose(graph_output[0].asnumpy(), pynative_output[0].asnumpy(), 0.0001, 0.0001)
assert np.allclose(graph_output[1].asnumpy(), pynative_output[1].asnumpy(), 0.0001, 0.0001)
expect1 = np.array([[[4, 4], [4, 4]],
[[4, 4], [4, 4]]]).astype(np.float32)
expect2 = np.array([[[3, 3], [3, 3]],
[[3, 3], [3, 3]]]).astype(np.float32)
assert np.allclose(graph_output[0].asnumpy(), expect1, 0.0001, 0.0001)
assert np.allclose(graph_output[1].asnumpy(), expect2, 0.0001, 0.0001)
@pytest.mark.level0
@ -751,20 +723,21 @@ def test_while_with_param_basic_grad_three():
idx = Tensor(np.array(0), dtype=ms.int32)
end = Tensor(np.array(3), dtype=ms.int32)
x = Tensor(np.random.randn(2, 2, 2).astype(np.float32), dtype=ms.float32)
x = Tensor(np.arange(8).reshape(2, 2, 2).astype(np.float32), dtype=ms.float32)
# graph mode
context.set_context(mode=context.GRAPH_MODE)
while_net = MyWhileNet()
net = GradNet(while_net)
graph_output = net(idx, end, x)
# pynative mode
context.set_context(mode=context.PYNATIVE_MODE)
while_net = MyWhileNet()
net = GradNet(while_net)
pynative_output = net(idx, end, x)
assert np.allclose(graph_output[0].asnumpy(), pynative_output[0].asnumpy(), 0.0001, 0.0001)
assert np.allclose(graph_output[1].asnumpy(), pynative_output[1].asnumpy(), 0.0001, 0.0001)
assert np.allclose(graph_output[2].asnumpy(), pynative_output[2].asnumpy(), 0.0001, 0.0001)
expect1 = np.array([[[4, 4], [4, 4]],
[[4, 4], [4, 4]]]).astype(np.float32)
expect2 = np.array([[[3, 3], [3, 3]],
[[3, 3], [3, 3]]]).astype(np.float32)
expect3 = np.array([[[3, 3], [3, 3]],
[[3, 3], [3, 3]]]).astype(np.float32)
assert np.allclose(graph_output[0].asnumpy(), expect1, 0.0001, 0.0001)
assert np.allclose(graph_output[1].asnumpy(), expect2, 0.0001, 0.0001)
assert np.allclose(graph_output[2].asnumpy(), expect3, 0.0001, 0.0001)
@pytest.mark.level0
@ -802,20 +775,15 @@ def test_while_if_with_param_grad():
idx = Tensor(np.array(0), dtype=ms.int32)
end = Tensor(np.array(3), dtype=ms.int32)
x = Tensor(np.ones([2, 2, 2]).astype(np.float32), dtype=ms.float32)
# graph mode
context.set_context(mode=context.GRAPH_MODE)
while_net = MyWhileNet()
net = GradNet(while_net)
graph_output = net(idx, end, x)
# pynative mode
context.set_context(mode=context.PYNATIVE_MODE)
while_net = MyWhileNet()
net = GradNet(while_net)
pynative_output = net(idx, end, x)
assert np.allclose(graph_output[0].asnumpy(), pynative_output[0].asnumpy(), 0.0001, 0.0001)
expect = np.array([[[5, 5], [5, 5]],
[[5, 5], [5, 5]]]).astype(np.float32)
assert np.allclose(graph_output[0].asnumpy(), expect, 0.0001, 0.0001)
@pytest.mark.skip(reason="not supported yet")
@pytest.mark.level0
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@ -825,8 +793,8 @@ def test_while_with_param_grad_not_enter_while():
def __init__(self):
super().__init__()
self.max = P.ReduceMax()
self.param = Parameter(Tensor(np.arange(2 * 2 * 2).reshape((2, 2, 2)), ms.float32), name="weight")
self.zero = Tensor(np.zeros(([2, 2, 2])), ms.float32)
self.param = Parameter(Tensor(2, ms.float32), name="weight")
self.zero = Tensor(0, ms.float32)
def construct(self, idx, end, x):
out = self.zero
@ -846,18 +814,14 @@ def test_while_with_param_grad_not_enter_while():
idx = Tensor(np.array(3), dtype=ms.int32)
end = Tensor(np.array(0), dtype=ms.int32)
x = Tensor(np.random.randn(2, 2, 2).astype(np.float32), dtype=ms.float32)
x = Tensor(2, dtype=ms.float32)
# graph mode
context.set_context(mode=context.GRAPH_MODE)
while_net = MyWhileNet()
net = GradNet(while_net)
graph_output = net(idx, end, x)
# pynative mode
context.set_context(mode=context.PYNATIVE_MODE)
while_net = MyWhileNet()
net = GradNet(while_net)
pynative_output = net(idx, end, x)
assert np.allclose(graph_output[0].asnumpy(), pynative_output[0].asnumpy(), 0.0001, 0.0001)
assert np.allclose(graph_output[0].asnumpy(), 1, 0.0001, 0.0001)
@pytest.mark.level0
@ -892,12 +856,9 @@ def test_with_param_if_by_if_forward():
if_net = MyIfByIfNet()
net = if_net
graph_output = net(idx, end, x)
# pynative mode
context.set_context(mode=context.PYNATIVE_MODE)
if_net = MyIfByIfNet()
net = if_net
pynative_output = net(idx, end, x)
assert np.allclose(graph_output.asnumpy(), pynative_output.asnumpy(), 0.0001, 0.0001)
expect = np.array([[[3, 4], [5, 6]],
[[7, 8], [9, 10]]]).astype(np.float32)
assert np.allclose(graph_output.asnumpy(), expect, 0.0001, 0.0001)
@pytest.mark.level0
@ -930,20 +891,19 @@ def test_with_param_if_by_if_grad_inputs():
idx = Tensor(np.array(0), dtype=ms.int32)
end = Tensor(np.array(0), dtype=ms.int32)
x = Tensor(np.random.randn(2, 2, 2).astype(np.float32), dtype=ms.float32)
x = Tensor(np.arange(8).reshape(2, 2, 2).astype(np.float32), dtype=ms.float32)
# graph mode
context.set_context(mode=context.GRAPH_MODE)
if_net = MyIfByIfNet()
net = GradNet(if_net)
graph_output = net(idx, end, x)
# pynative mode
context.set_context(mode=context.PYNATIVE_MODE)
if_net = MyIfByIfNet()
net = GradNet(if_net)
pynative_output = net(idx, end, x)
assert np.allclose(graph_output[0].asnumpy(), pynative_output[0].asnumpy(), 0.0001, 0.0001)
assert np.allclose(graph_output[1].asnumpy(), pynative_output[1].asnumpy(), 0.0001, 0.0001)
assert np.allclose(graph_output[2].asnumpy(), pynative_output[2].asnumpy(), 0.0001, 0.0001)
expect1 = Tensor(np.array(0), dtype=ms.int32)
expect2 = Tensor(np.array(0), dtype=ms.int32)
expect3 = np.array([[[3, 3], [3, 3]],
[[3, 3], [3, 3]]]).astype(np.float32)
assert np.allclose(graph_output[0].asnumpy(), expect1.asnumpy(), 0.0001, 0.0001)
assert np.allclose(graph_output[1].asnumpy(), expect2.asnumpy(), 0.0001, 0.0001)
assert np.allclose(graph_output[2].asnumpy(), expect3, 0.0001, 0.0001)
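expect1 and expect2 are zero because idx and end only steer control flow; no arithmetic path from them reaches the output, so their gradients vanish. A compact sketch of the same effect (toy cell, assumed to mirror the framework's behavior in these tests):

import numpy as np
import mindspore as ms
import mindspore.nn as nn
from mindspore import Tensor, context
from mindspore.ops import composite as C

context.set_context(mode=context.GRAPH_MODE)
grad_all = C.GradOperation(get_all=True)

class Gate(nn.Cell):
    def construct(self, flag, x):
        # flag only selects the branch; x alone feeds the result.
        if flag > 0:
            return x * 3
        return x

grads = grad_all(Gate())(Tensor(np.array(1), ms.int32), Tensor(np.array(2.0), ms.float32))
assert np.allclose(grads[0].asnumpy(), 0)  # control input gets zero grad
assert np.allclose(grads[1].asnumpy(), 3)  # data input gets d(3x)/dx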
@pytest.mark.level0
@ -977,18 +937,16 @@ def test_with_param_if_by_if_grad_parameter():
idx = Tensor(np.array(0), dtype=ms.int32)
end = Tensor(np.array(2), dtype=ms.int32)
x = Tensor(np.random.randn(2, 2, 2).astype(np.float32), dtype=ms.float32)
x = Tensor(np.arange(8).reshape(2, 2, 2).astype(np.float32), dtype=ms.float32)
# graph mode
context.set_context(mode=context.GRAPH_MODE)
if_net = MyIfByIfNet()
net = GradNet(if_net)
graph_output = net(idx, end, x)
# pynative mode
context.set_context(mode=context.PYNATIVE_MODE)
if_net = MyIfByIfNet()
net = GradNet(if_net)
pynative_output = net(idx, end, x)
assert np.allclose(graph_output[0].asnumpy(), pynative_output[0].asnumpy(), 0.0001, 0.0001)
expect = np.array([[[2, 2], [2, 2]],
[[2, 2], [2, 2]]]).astype(np.float32)
assert np.allclose(graph_output[0].asnumpy(), expect, 0.0001, 0.0001)
@pytest.mark.level0
@ -1020,18 +978,16 @@ def test_with_param_if_by_if_grad_param_excute_null():
idx = Tensor(np.array(4), dtype=ms.int32)
end = Tensor(np.array(0), dtype=ms.int32)
x = Tensor(np.random.randn(2, 2, 2).astype(np.float32), dtype=ms.float32)
x = Tensor(np.arange(8).reshape(2, 2, 2).astype(np.float32), dtype=ms.float32)
# graph mode
context.set_context(mode=context.GRAPH_MODE)
if_net = MyIfByIfNet()
net = GradNet(if_net)
graph_output = net(idx, end, x)
# pynative mode
context.set_context(mode=context.PYNATIVE_MODE)
if_net = MyIfByIfNet()
net = GradNet(if_net)
pynative_output = net(idx, end, x)
assert np.allclose(graph_output[0].asnumpy(), pynative_output[0].asnumpy(), 0.0001, 0.0001)
expect = np.array([[[0, 0], [0, 0]],
[[0, 0], [0, 0]]]).astype(np.float32)
assert np.allclose(graph_output[0].asnumpy(), expect, 0.0001, 0.0001)
@pytest.mark.level1
@ -1065,18 +1021,16 @@ def test_if_by_if_return_inside_grad():
idx = Tensor(np.array(1), dtype=ms.int32)
end = Tensor(np.array(0), dtype=ms.int32)
x = Tensor(np.random.randn(2, 2, 2).astype(np.float32), dtype=ms.float32)
x = Tensor(np.arange(8).reshape(2, 2, 2).astype(np.float32), dtype=ms.float32)
# graph mode
context.set_context(mode=context.GRAPH_MODE)
if_net = MyIfByIfNet()
net = GradNet(if_net)
graph_output = net(idx, end, x)
# pynative mode
context.set_context(mode=context.PYNATIVE_MODE)
if_net = MyIfByIfNet()
net = GradNet(if_net)
pynative_output = net(idx, end, x)
assert np.allclose(graph_output[0].asnumpy(), pynative_output[0].asnumpy(), 0.0001, 0.0001)
expect = np.array([[[3, 3], [3, 3]],
[[3, 3], [3, 3]]]).astype(np.float32)
assert np.allclose(graph_output[0].asnumpy(), expect, 0.0001, 0.0001)
@pytest.mark.level1
@ -1117,12 +1071,8 @@ def test_if_by_if_forward():
if_net = MyIfByIfNet()
net = if_net
graph_output = net(idx, end, x)
# pynative mode
context.set_context(mode=context.PYNATIVE_MODE)
if_net = MyIfByIfNet()
net = if_net
pynative_output = net(idx, end, x)
assert np.allclose(graph_output.asnumpy(), pynative_output.asnumpy(), 0.0001, 0.0001)
expect = 19.11111
assert np.allclose(graph_output.asnumpy(), expect, 0.0001, 0.0001)
@pytest.mark.level0
@ -1190,12 +1140,8 @@ def test_if_by_if_forward_control_tuple_switch():
if_net = MyIfByIfNet()
net = if_net
graph_output = net(idx, end, x)
# pynative mode
context.set_context(mode=context.PYNATIVE_MODE)
if_net = MyIfByIfNet()
net = if_net
pynative_output = net(idx, end, x)
assert np.allclose(graph_output.asnumpy(), pynative_output.asnumpy(), 0.0001, 0.0001)
expect = 4.444444
assert np.allclose(graph_output.asnumpy(), expect, 0.0001, 0.0001)
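Expectations like 4.444444 and 19.11111 are truncated decimals, so these checks lean on np.allclose's rtol/atol rather than exact equality. A tiny sketch of why the tolerance matters:

import numpy as np

out = np.float32(4.4444447)  # a representative float32 result
# Exact equality against the truncated literal can fail by an ulp or two,
# but the tolerant check used in these tests passes.
assert np.allclose(out, 4.444444, 0.0001, 0.0001)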
@pytest.mark.level0
@ -1261,12 +1207,8 @@ def test_if_by_if_forward_control_inside_net():
if_net = MyIfByIfNet()
net = if_net
graph_output = net(idx, end, x)
# pynative mode
context.set_context(mode=context.PYNATIVE_MODE)
if_net = MyIfByIfNet()
net = if_net
pynative_output = net(idx, end, x)
assert np.allclose(graph_output.asnumpy(), pynative_output.asnumpy(), 0.0001, 0.0001)
expect = 4.444444
assert np.allclose(graph_output.asnumpy(), expect, 0.0001, 0.0001)
@pytest.mark.level1
@ -1307,12 +1249,8 @@ def test_if_by_if_forward_use_namespace():
if_net = MyIfByIfNet()
net = if_net
graph_output = net(idx, end, x)
# pynative mode
context.set_context(mode=context.PYNATIVE_MODE)
if_net = MyIfByIfNet()
net = if_net
pynative_output = net(idx, end, x)
assert np.allclose(graph_output.asnumpy(), pynative_output.asnumpy(), 0.0001, 0.0001)
expect = 4.444444
assert np.allclose(graph_output.asnumpy(), expect, 0.0001, 0.0001)
@pytest.mark.level1
@ -1357,12 +1295,9 @@ def test_if_by_if_forward_use_global_op():
if_net = MyIfByIfNet()
net = if_net
graph_output = net(idx, end, x)
# pynative mode
context.set_context(mode=context.PYNATIVE_MODE)
if_net = MyIfByIfNet()
net = if_net
pynative_output = net(idx, end, x)
assert np.allclose(graph_output.asnumpy(), pynative_output.asnumpy(), 0.0001, 0.0001)
expect = 4.444444
assert np.allclose(graph_output.asnumpy(), expect, 0.0001, 0.0001)
@pytest.mark.level1
@ -1394,12 +1329,9 @@ def test_for_with_if_by_if_forward():
if_net = MyIfByIfNet()
net = if_net
graph_output = net(idx, end, x)
# pynative mode
context.set_context(mode=context.PYNATIVE_MODE)
if_net = MyIfByIfNet()
net = if_net
pynative_output = net(idx, end, x)
assert np.allclose(graph_output.asnumpy(), pynative_output.asnumpy(), 0.0001, 0.0001)
expect = 18.0
assert np.allclose(graph_output.asnumpy(), expect, 0.0001, 0.0001)
@pytest.mark.level1
@ -1433,12 +1365,9 @@ def test_for_with_if_by_if_forward_namespace():
if_net = MyIfByIfNet()
net = if_net
graph_output = net(idx, end, x)
# pynative mode
context.set_context(mode=context.PYNATIVE_MODE)
if_net = MyIfByIfNet()
net = if_net
pynative_output = net(idx, end, x)
assert np.allclose(graph_output.asnumpy(), pynative_output.asnumpy(), 0.0001, 0.0001)
expect = 18.0
assert np.allclose(graph_output.asnumpy(), expect, 0.0001, 0.0001)
@pytest.mark.level1
@ -1483,12 +1412,9 @@ def test_if_by_if_forward_const_branch_inner():
if_net = MyIfByIfNet()
net = if_net
graph_output = net(idx, end, x)
# pynative mode
context.set_context(mode=context.PYNATIVE_MODE)
if_net = MyIfByIfNet()
net = if_net
pynative_output = net(idx, end, x)
assert np.allclose(graph_output.asnumpy(), pynative_output.asnumpy(), 0.0001, 0.0001)
expect = 240.0
assert np.allclose(graph_output.asnumpy(), expect, 0.0001, 0.0001)
@pytest.mark.level1
@ -1533,12 +1459,9 @@ def test_if_by_if_forward_all_const_branch():
if_net = MyIfByIfNet()
net = if_net
graph_output = net(idx, end, x)
# pynative mode
context.set_context(mode=context.PYNATIVE_MODE)
if_net = MyIfByIfNet()
net = if_net
pynative_output = net(idx, end, x)
assert np.allclose(graph_output.asnumpy(), pynative_output.asnumpy(), 0.0001, 0.0001)
expect = 240.0
assert np.allclose(graph_output.asnumpy(), expect, 0.0001, 0.0001)
@pytest.mark.level1