From 39c0a5b7dad4b94bfbb8dd75f6ddde1aefcf58db Mon Sep 17 00:00:00 2001
From: lanzhineng
Date: Wed, 21 Apr 2021 09:38:43 +0800
Subject: [PATCH] while cases 2

---
 .../st/control/inner/test_201_for_n_while.py  | 69 +++++++++++++++++
 .../control/inner/test_202_while_n_while.py   | 71 ++++++++++++++++++
 .../inner/test_221_while_while_while.py       | 74 +++++++++++++++++++
 .../control/inner/test_222_for_while_while.py | 72 ++++++++++++++++++
 4 files changed, 286 insertions(+)
 create mode 100644 tests/st/control/inner/test_201_for_n_while.py
 create mode 100644 tests/st/control/inner/test_202_while_n_while.py
 create mode 100644 tests/st/control/inner/test_221_while_while_while.py
 create mode 100644 tests/st/control/inner/test_222_for_while_while.py

diff --git a/tests/st/control/inner/test_201_for_n_while.py b/tests/st/control/inner/test_201_for_n_while.py
new file mode 100644
index 00000000000..ea0f3e80a6c
--- /dev/null
+++ b/tests/st/control/inner/test_201_for_n_while.py
@@ -0,0 +1,69 @@
+# Copyright 2020 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+
+import numpy as np
+from mindspore.common import dtype as mstype
+from mindspore import nn
+from mindspore import Tensor
+from mindspore.ops import composite as C
+from mindspore import context
+
+context.set_context(mode=context.GRAPH_MODE, save_graphs=False, device_target="Ascend")
+
+
+class ForwardNet(nn.Cell):
+    def __init__(self, max_cycles=10):
+        super(ForwardNet, self).__init__()
+        self.max_cycles = max_cycles
+        self.zero = Tensor(np.array(0), mstype.int32)
+        self.i = Tensor(np.array(0), mstype.int32)
+
+    def construct(self, x, y):
+        out = self.zero
+        for _ in range(0, self.max_cycles):
+            out = x * y + out
+        i = self.i
+        while i < self.max_cycles:
+            out = x * y + out
+            i = i + 1
+        return out
+
+
+class BackwardNet(nn.Cell):
+    def __init__(self, net):
+        super(BackwardNet, self).__init__(auto_prefix=False)
+        self.forward_net = net
+        self.grad = C.GradOperation()
+
+    def construct(self, *inputs):
+        grads = self.grad(self.forward_net)(*inputs)
+        return grads
+
+
+def test_forward():
+    x = Tensor(np.array(1), mstype.int32)
+    y = Tensor(np.array(3), mstype.int32)
+    forward_net = ForwardNet(max_cycles=3)
+    out = forward_net(x, y)
+    print("forward out:", out)
+
+
+def test_backward():
+    x = Tensor(np.array(1), mstype.int32)
+    y = Tensor(np.array(3), mstype.int32)
+    forward_net = ForwardNet(max_cycles=3)
+    backward_net = BackwardNet(forward_net)
+    grads = backward_net(x, y)
+    print("grads:", grads)
diff --git a/tests/st/control/inner/test_202_while_n_while.py b/tests/st/control/inner/test_202_while_n_while.py
new file mode 100644
index 00000000000..f0978012195
--- /dev/null
+++ b/tests/st/control/inner/test_202_while_n_while.py
@@ -0,0 +1,71 @@
+# Copyright 2020 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+
+import numpy as np
+from mindspore.common import dtype as mstype
+from mindspore import nn
+from mindspore import Tensor
+from mindspore.ops import composite as C
+from mindspore import context
+
+context.set_context(mode=context.GRAPH_MODE, save_graphs=False, device_target="Ascend")
+
+
+class ForwardNet(nn.Cell):
+    def __init__(self, max_cycles=10):
+        super(ForwardNet, self).__init__()
+        self.max_cycles = max_cycles
+        self.zero = Tensor(np.array(0), mstype.int32)
+        self.i = Tensor(np.array(0), mstype.int32)
+
+    def construct(self, x, y):
+        out = self.zero
+        i = self.i
+        while i < self.max_cycles:
+            out = x * y + out
+            i = i + 1
+        i = self.i
+        while i < self.max_cycles:
+            out = x * y + out
+            i = i + 1
+        return out
+
+
+class BackwardNet(nn.Cell):
+    def __init__(self, net):
+        super(BackwardNet, self).__init__(auto_prefix=False)
+        self.forward_net = net
+        self.grad = C.GradOperation()
+
+    def construct(self, *inputs):
+        grads = self.grad(self.forward_net)(*inputs)
+        return grads
+
+
+def test_forward():
+    x = Tensor(np.array(1), mstype.int32)
+    y = Tensor(np.array(3), mstype.int32)
+    forward_net = ForwardNet(max_cycles=3)
+    out = forward_net(x, y)
+    print("forward out:", out)
+
+
+def test_backward():
+    x = Tensor(np.array(1), mstype.int32)
+    y = Tensor(np.array(3), mstype.int32)
+    forward_net = ForwardNet(max_cycles=3)
+    backward_net = BackwardNet(forward_net)
+    grads = backward_net(x, y)
+    print("grads:", grads)
diff --git a/tests/st/control/inner/test_221_while_while_while.py b/tests/st/control/inner/test_221_while_while_while.py
new file mode 100644
index 00000000000..48130e564fc
--- /dev/null
+++ b/tests/st/control/inner/test_221_while_while_while.py
@@ -0,0 +1,74 @@
+# Copyright 2020 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+
+import numpy as np
+from mindspore.common import dtype as mstype
+from mindspore import nn
+from mindspore import Tensor
+from mindspore.ops import composite as C
+from mindspore import context
+
+context.set_context(mode=context.GRAPH_MODE, save_graphs=False, device_target="Ascend")
+
+
+class ForwardNet(nn.Cell):
+    def __init__(self, max_cycles=10):
+        super(ForwardNet, self).__init__()
+        self.max_cycles = max_cycles
+        self.zero = Tensor(np.array(0), mstype.int32)
+        self.i = Tensor(np.array(0), mstype.int32)
+
+    def construct(self, x, y):
+        out = self.zero
+        i = self.i
+        while i < self.max_cycles:
+            j = self.i
+            while j < self.max_cycles:
+                out = x * y + out
+                j = j + 1
+            i = i + 1
+        i = self.i
+        while i < self.max_cycles:
+            out = x * y + out
+            i = i + 1
+        return out
+
+
+class BackwardNet(nn.Cell):
+    def __init__(self, net):
+        super(BackwardNet, self).__init__(auto_prefix=False)
+        self.forward_net = net
+        self.grad = C.GradOperation()
+
+    def construct(self, *inputs):
+        grads = self.grad(self.forward_net)(*inputs)
+        return grads
+
+
+def test_forward():
+    x = Tensor(np.array(1), mstype.int32)
+    y = Tensor(np.array(3), mstype.int32)
+    forward_net = ForwardNet(max_cycles=3)
+    out = forward_net(x, y)
+    print("forward out:", out)
+
+
+def test_backward():
+    x = Tensor(np.array(1), mstype.int32)
+    y = Tensor(np.array(3), mstype.int32)
+    forward_net = ForwardNet(max_cycles=3)
+    backward_net = BackwardNet(forward_net)
+    grads = backward_net(x, y)
+    print("grads:", grads)
diff --git a/tests/st/control/inner/test_222_for_while_while.py b/tests/st/control/inner/test_222_for_while_while.py
new file mode 100644
index 00000000000..9dc5e134c92
--- /dev/null
+++ b/tests/st/control/inner/test_222_for_while_while.py
@@ -0,0 +1,72 @@
+# Copyright 2020 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================ + +import numpy as np +from mindspore.common import dtype as mstype +from mindspore import nn +from mindspore import Tensor +from mindspore.ops import composite as C +from mindspore import context + +context.set_context(mode=context.GRAPH_MODE, save_graphs=False, device_target="Ascend") + + +class ForwardNet(nn.Cell): + def __init__(self, max_cycles=10): + super(ForwardNet, self).__init__() + self.max_cycles = max_cycles + self.zero = Tensor(np.array(0), mstype.int32) + self.i = Tensor(np.array(0), mstype.int32) + + def construct(self, x, y): + out = self.zero + for _ in range(0, self.max_cycles): + j = self.i + while j < self.max_cycles: + out = x * y + out + j = j + 1 + i = self.i + while i < self.max_cycles: + out = x * y + out + i = i + 1 + return out + + +class BackwardNet(nn.Cell): + def __init__(self, net): + super(BackwardNet, self).__init__(auto_prefix=False) + self.forward_net = net + self.grad = C.GradOperation() + + def construct(self, *inputs): + grads = self.grad(self.forward_net)(*inputs) + return grads + + +def test_forward(): + x = Tensor(np.array(1), mstype.int32) + y = Tensor(np.array(3), mstype.int32) + forward_net = ForwardNet(max_cycles=3) + out = forward_net(x, y) + print("forward out:", out) + + +def test_backward(): + x = Tensor(np.array(1), mstype.int32) + y = Tensor(np.array(3), mstype.int32) + forward_net = ForwardNet(max_cycles=3) + backward_net = BackwardNet(forward_net) + grads = backward_net(x, y) + print("grads:", grads)
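
Editor's note, not part of the patch: each ForwardNet above accumulates x * y a fixed number of times, so with max_cycles=3, x=1, y=3 the forward result is 6 * x * y = 18 for test_201/test_202 and (3 * 3 + 3) * x * y = 36 for test_221/test_222. The plain-Python sketch below (expected_out is an illustrative helper, not a MindSpore API) replays the loop structures so those counts can be checked without running the graph; the first loop is modeled as a for with the same trip count as the corresponding for/while in the tests.

def expected_out(x, y, max_cycles, nested_first_loop):
    """Replay the accumulation pattern of the ForwardNet variants in plain Python."""
    out = 0
    # First loop: flat (tests 201/202) or with a while nested inside it (tests 221/222).
    for _ in range(max_cycles):
        if nested_first_loop:
            j = 0
            while j < max_cycles:
                out = x * y + out
                j = j + 1
        else:
            out = x * y + out
    # Trailing while loop, common to all four test cases.
    i = 0
    while i < max_cycles:
        out = x * y + out
        i = i + 1
    return out


assert expected_out(1, 3, 3, nested_first_loop=False) == 18  # test_201, test_202
assert expected_out(1, 3, 3, nested_first_loop=True) == 36   # test_221, test_222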