while test cases

lanzhineng 2021-04-20 10:22:30 +08:00
parent b77da73928
commit 65479c7646
2 changed files with 138 additions and 0 deletions


@@ -0,0 +1,70 @@
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================

import numpy as np
from mindspore.common import dtype as mstype
from mindspore import nn
from mindspore import Tensor
from mindspore.ops import composite as C
from mindspore import context

context.set_context(mode=context.GRAPH_MODE, save_graphs=False, device_target="Ascend")


class ForwardNet(nn.Cell):
    def __init__(self, max_cycles=10):
        super(ForwardNet, self).__init__()
        self.max_cycles = max_cycles
        self.zero = Tensor(np.array(0), mstype.int32)
        self.i = Tensor(np.array(0), mstype.int32)

    def construct(self, x, y):
        out = self.zero
        j = self.i
        while j < self.max_cycles:
            i = self.i
            while i < self.max_cycles:
                out = x * y + out
                i = i + 1
            j = j + 1
        return out


class BackwardNet(nn.Cell):
    def __init__(self, net):
        super(BackwardNet, self).__init__(auto_prefix=False)
        self.forward_net = net
        self.grad = C.GradOperation()

    def construct(self, *inputs):
        grads = self.grad(self.forward_net)(*inputs)
        return grads


def test_forward():
    x = Tensor(np.array(1), mstype.int32)
    y = Tensor(np.array(3), mstype.int32)
    forward_net = ForwardNet(max_cycles=3)
    out = forward_net(x, y)
    print("forward out:", out)


def test_backward():
    x = Tensor(np.array(1), mstype.int32)
    y = Tensor(np.array(3), mstype.int32)
    forward_net = ForwardNet(max_cycles=3)
    backward_net = BackwardNet(forward_net)
    grads = backward_net(x, y)
    print("grads:", grads)


@@ -0,0 +1,68 @@
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================

import numpy as np
from mindspore.common import dtype as mstype
from mindspore import nn
from mindspore import Tensor
from mindspore.ops import composite as C
from mindspore import context

context.set_context(mode=context.GRAPH_MODE, save_graphs=False, device_target="Ascend")


class ForwardNet(nn.Cell):
    def __init__(self, max_cycles=10):
        super(ForwardNet, self).__init__()
        self.max_cycles = max_cycles
        self.zero = Tensor(np.array(0), mstype.int32)
        self.i = Tensor(np.array(0), mstype.int32)

    def construct(self, x, y):
        out = self.zero
        for _ in range(0, self.max_cycles):
            i = self.i
            while i < self.max_cycles:
                out = x * y + out
                i = i + 1
        return out


class BackwardNet(nn.Cell):
    def __init__(self, net):
        super(BackwardNet, self).__init__(auto_prefix=False)
        self.forward_net = net
        self.grad = C.GradOperation()

    def construct(self, *inputs):
        grads = self.grad(self.forward_net)(*inputs)
        return grads


def test_forward():
    x = Tensor(np.array(1), mstype.int32)
    y = Tensor(np.array(3), mstype.int32)
    forward_net = ForwardNet(max_cycles=3)
    out = forward_net(x, y)
    print("forward out:", out)


def test_backward():
    x = Tensor(np.array(1), mstype.int32)
    y = Tensor(np.array(3), mstype.int32)
    forward_net = ForwardNet(max_cycles=3)
    backward_net = BackwardNet(forward_net)
    grads = backward_net(x, y)
    print("grads:", grads)