!1044 clean pylint warnings in test dir

Merge pull request !1044 from jinyaohui/clean_pylint_test
mindspore-ci-bot 2020-05-11 22:07:46 +08:00 committed by Gitee
commit 2bc3fcb1c1
103 changed files with 954 additions and 588 deletions
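
Most of the hunks below make the same two mechanical fixes: inserting the blank lines that pylint/PEP 8 expect between module-level statements and top-level definitions, and wrapping over-long lines. As a rough sketch only, here is the "after" shape of a typical cleaned test file (a hypothetical file, not one of the 103 changed here; running it assumes a MindSpore install with an Ascend target):

import numpy as np

import mindspore.context as context
import mindspore.nn as nn
from mindspore import Tensor
from mindspore.ops import operations as P

# Module-level setup is separated from the imports by a blank line, and
# every top-level definition gets the two blank lines the linter expects.
context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")


class Net(nn.Cell):
    def __init__(self):
        super(Net, self).__init__()
        self.add = P.TensorAdd()

    def construct(self, x, y):
        return self.add(x, y)


def test_net():
    x = np.random.randn(1, 3, 3, 4).astype(np.float32)
    y = np.random.randn(1, 3, 3, 4).astype(np.float32)
    net = Net()
    output = net(Tensor(x), Tensor(y))
    print(output.asnumpy())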

View File

@ -20,8 +20,11 @@ import numpy as np
import mindspore.context as context
from mindspore.common.initializer import initializer
from mindspore.common.parameter import Parameter
context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
context.set_context(enable_task_sink=True)
class Net(nn.Cell):
def __init__(self):
super(Net, self).__init__()
@ -30,9 +33,11 @@ class Net(nn.Cell):
def construct(self, x, y):
return self.add(x, y)
x = np.ones([1, 3, 3, 4]).astype(np.float32)
y = np.ones([1, 3, 3, 4]).astype(np.float32)
def test_net():
add = Net()
output = add(Tensor(x), Tensor(y))

View File

@ -20,7 +20,10 @@ import numpy as np
import mindspore.context as context
from mindspore.common.initializer import initializer
from mindspore.common.parameter import Parameter
context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
class Net(nn.Cell):
def __init__(self):
super(Net, self).__init__()
@ -29,6 +32,7 @@ class Net(nn.Cell):
def construct(self, x, y):
return self.add((x, y))
def test_net():
x = np.random.randn(1, 3, 3, 4).astype(np.float32)
y = np.random.randn(1, 3, 3, 4).astype(np.float32)

View File

@ -18,7 +18,10 @@ import mindspore.nn as nn
from mindspore.common.api import ms_function
import numpy as np
import mindspore.context as context
context.set_context(mode=context.PYNATIVE_MODE, device_target="Ascend")
class Net(nn.Cell):
def __init__(self):
super(Net, self).__init__()
@ -35,6 +38,7 @@ def test_net_bool():
print(output.asnumpy())
assert (np.all(output.asnumpy() == np.expand_dims(x, -1)))
def test_net_int8():
x = np.random.randn(1, 16, 1, 1).astype(np.int8)
net = Net()
@ -42,6 +46,7 @@ def test_net_int8():
print(output.asnumpy())
assert (np.all(output.asnumpy() == np.expand_dims(x, -1)))
def test_net_uint8():
x = np.random.randn(1, 16, 1, 1).astype(np.uint8)
net = Net()
@ -49,6 +54,7 @@ def test_net_uint8():
print(output.asnumpy())
assert (np.all(output.asnumpy() == np.expand_dims(x, -1)))
def test_net_int16():
x = np.random.randn(1, 16, 1, 1).astype(np.int16)
net = Net()
@ -56,6 +62,7 @@ def test_net_int16():
print(output.asnumpy())
assert (np.all(output.asnumpy() == np.expand_dims(x, -1)))
def test_net_uint16():
x = np.random.randn(1, 16, 1, 1).astype(np.uint16)
net = Net()
@ -63,6 +70,7 @@ def test_net_uint16():
print(output.asnumpy())
assert (np.all(output.asnumpy() == np.expand_dims(x, -1)))
def test_net_int32():
x = np.random.randn(1, 16, 1, 1).astype(np.int32)
net = Net()
@ -70,6 +78,7 @@ def test_net_int32():
print(output.asnumpy())
assert (np.all(output.asnumpy() == np.expand_dims(x, -1)))
def test_net_uint32():
x = np.random.randn(1, 16, 1, 1).astype(np.uint32)
net = Net()
@ -77,6 +86,7 @@ def test_net_uint32():
print(output.asnumpy())
assert (np.all(output.asnumpy() == np.expand_dims(x, -1)))
def test_net_int64():
x = np.random.randn(1, 16, 1, 1).astype(np.int64)
net = Net()
@ -84,6 +94,7 @@ def test_net_int64():
print(output.asnumpy())
assert (np.all(output.asnumpy() == np.expand_dims(x, -1)))
def test_net_uint64():
x = np.random.randn(1, 16, 1, 1).astype(np.uint64)
net = Net()
@ -91,6 +102,7 @@ def test_net_uint64():
print(output.asnumpy())
assert (np.all(output.asnumpy() == np.expand_dims(x, -1)))
def test_net_float16():
x = np.random.randn(1, 16, 1, 1).astype(np.float16)
net = Net()
@ -98,6 +110,7 @@ def test_net_float16():
print(output.asnumpy())
assert (np.all(output.asnumpy() == np.expand_dims(x, -1)))
def test_net_float32():
x = np.random.randn(1, 16, 1, 1).astype(np.float32)
net = Net()
@ -105,10 +118,10 @@ def test_net_float32():
print(output.asnumpy())
assert (np.all(output.asnumpy() == np.expand_dims(x, -1)))
def test_net_float64():
x = np.random.randn(1, 16, 1, 1).astype(np.float64)
net = Net()
output = net(Tensor(x), -1)
print(output.asnumpy())
assert (np.all(output.asnumpy() == np.expand_dims(x, -1)))

View File

@ -17,7 +17,10 @@ from mindspore.ops import operations as P
import mindspore.nn as nn
import numpy as np
import mindspore.context as context
context.set_context(mode=context.PYNATIVE_MODE, device_target="Ascend")
class Net(nn.Cell):
def __init__(self):
super(Net, self).__init__()
@ -34,6 +37,7 @@ def test_net_int8():
print(output.asnumpy())
assert (np.all(output.asnumpy() == x.flatten()))
def test_net_uint8():
x = np.random.randn(1, 16, 1, 1).astype(np.uint8)
net = Net()
@ -41,6 +45,7 @@ def test_net_uint8():
print(output.asnumpy())
assert (np.all(output.asnumpy() == x.flatten()))
def test_net_int16():
x = np.random.randn(1, 16, 1, 1).astype(np.int16)
net = Net()
@ -48,6 +53,7 @@ def test_net_int16():
print(output.asnumpy())
assert (np.all(output.asnumpy() == x.flatten()))
def test_net_uint16():
x = np.random.randn(1, 16, 1, 1).astype(np.uint16)
net = Net()
@ -55,6 +61,7 @@ def test_net_uint16():
print(output.asnumpy())
assert (np.all(output.asnumpy() == x.flatten()))
def test_net_int32():
x = np.random.randn(1, 16, 1, 1).astype(np.int32)
net = Net()
@ -62,6 +69,7 @@ def test_net_int32():
print(output.asnumpy())
assert (np.all(output.asnumpy() == x.flatten()))
def test_net_uint32():
x = np.random.randn(1, 16, 1, 1).astype(np.uint32)
net = Net()
@ -69,6 +77,7 @@ def test_net_uint32():
print(output.asnumpy())
assert (np.all(output.asnumpy() == x.flatten()))
def test_net_int64():
x = np.random.randn(1, 16, 1, 1).astype(np.int64)
net = Net()
@ -76,6 +85,7 @@ def test_net_int64():
print(output.asnumpy())
assert (np.all(output.asnumpy() == x.flatten()))
def test_net_uint64():
x = np.random.randn(1, 16, 1, 1).astype(np.uint64)
net = Net()
@ -83,6 +93,7 @@ def test_net_uint64():
print(output.asnumpy())
assert (np.all(output.asnumpy() == x.flatten()))
def test_net_float16():
x = np.random.randn(1, 16, 1, 1).astype(np.float16)
net = Net()
@ -90,10 +101,10 @@ def test_net_float16():
print(output.asnumpy())
assert (np.all(output.asnumpy() == x.flatten()))
def test_net_float32():
x = np.random.randn(1, 16, 1, 1).astype(np.float32)
net = Net()
output = net(Tensor(x))
print(output.asnumpy())
assert (np.all(output.asnumpy() == x.flatten()))

View File

@ -18,7 +18,10 @@ import mindspore.nn as nn
from mindspore.common.api import ms_function
import numpy as np
import mindspore.context as context
context.set_context(mode=context.PYNATIVE_MODE, device_target="Ascend")
class Net(nn.Cell):
def __init__(self):
super(Net, self).__init__()
@ -35,6 +38,7 @@ def test_net_bool():
print(output.asnumpy())
assert (np.all(output.asnumpy() == np.isfinite(x)))
def test_net_int8():
x = np.random.randn(1, 16, 1, 1).astype(np.int8)
net = Net()
@ -42,6 +46,7 @@ def test_net_int8():
print(output.asnumpy())
assert (np.all(output.asnumpy() == np.isfinite(x)))
def test_net_uint8():
x = np.random.randn(1, 16, 1, 1).astype(np.uint8)
net = Net()
@ -49,6 +54,7 @@ def test_net_uint8():
print(output.asnumpy())
assert (np.all(output.asnumpy() == np.isfinite(x)))
def test_net_int16():
x = np.random.randn(1, 16, 1, 1).astype(np.int16)
net = Net()
@ -56,6 +62,7 @@ def test_net_int16():
print(output.asnumpy())
assert (np.all(output.asnumpy() == np.isfinite(x)))
def test_net_uint16():
x = np.random.randn(1, 16, 1, 1).astype(np.uint16)
net = Net()
@ -63,6 +70,7 @@ def test_net_uint16():
print(output.asnumpy())
assert (np.all(output.asnumpy() == np.isfinite(x)))
def test_net_int32():
x = np.random.randn(1, 16, 1, 1).astype(np.int32)
net = Net()
@ -70,6 +78,7 @@ def test_net_int32():
print(output.asnumpy())
assert (np.all(output.asnumpy() == np.isfinite(x)))
def test_net_uint32():
x = np.random.randn(1, 16, 1, 1).astype(np.uint32)
net = Net()
@ -77,6 +86,7 @@ def test_net_uint32():
print(output.asnumpy())
assert (np.all(output.asnumpy() == np.isfinite(x)))
def test_net_int64():
x = np.random.randn(1, 16, 1, 1).astype(np.int64)
net = Net()
@ -84,6 +94,7 @@ def test_net_int64():
print(output.asnumpy())
assert (np.all(output.asnumpy() == np.isfinite(x)))
def test_net_uint64():
x = np.random.randn(1, 16, 1, 1).astype(np.uint64)
net = Net()
@ -91,6 +102,7 @@ def test_net_uint64():
print(output.asnumpy())
assert (np.all(output.asnumpy() == np.isfinite(x)))
def test_net_float16():
x = np.random.randn(1, 16, 1, 1).astype(np.float16)
net = Net()
@ -98,6 +110,7 @@ def test_net_float16():
print(output.asnumpy())
assert (np.all(output.asnumpy() == np.isfinite(x)))
def test_net_float32():
x = np.random.randn(1, 16, 1, 1).astype(np.float32)
net = Net()
@ -105,10 +118,10 @@ def test_net_float32():
print(output.asnumpy())
assert (np.all(output.asnumpy() == np.isfinite(x)))
def test_net_float64():
x = np.random.randn(1, 16, 1, 1).astype(np.float64)
net = Net()
output = net(Tensor(x))
print(output.asnumpy())
assert (np.all(output.asnumpy() == np.isfinite(x)))

View File

@ -18,7 +18,10 @@ import mindspore.nn as nn
from mindspore.common.api import ms_function
import numpy as np
import mindspore.context as context
context.set_context(mode=context.PYNATIVE_MODE, device_target="Ascend")
class Net(nn.Cell):
def __init__(self):
super(Net, self).__init__()
@ -35,6 +38,7 @@ def test_net_bool():
print(output.asnumpy())
assert (np.all(output.asnumpy() == np.reshape(x, (4, 4))))
def test_net_int8():
x = np.random.randn(1, 16, 1, 1).astype(np.int8)
net = Net()
@ -42,6 +46,7 @@ def test_net_int8():
print(output.asnumpy())
assert (np.all(output.asnumpy() == np.reshape(x, (4, 4))))
def test_net_uint8():
x = np.random.randn(1, 16, 1, 1).astype(np.uint8)
net = Net()
@ -49,6 +54,7 @@ def test_net_uint8():
print(output.asnumpy())
assert (np.all(output.asnumpy() == np.reshape(x, (4, 4))))
def test_net_int16():
x = np.random.randn(1, 16, 1, 1).astype(np.int16)
net = Net()
@ -56,6 +62,7 @@ def test_net_int16():
print(output.asnumpy())
assert (np.all(output.asnumpy() == np.reshape(x, (4, 4))))
def test_net_uint16():
x = np.random.randn(1, 16, 1, 1).astype(np.uint16)
net = Net()
@ -63,6 +70,7 @@ def test_net_uint16():
print(output.asnumpy())
assert (np.all(output.asnumpy() == np.reshape(x, (4, 4))))
def test_net_int32():
x = np.random.randn(1, 16, 1, 1).astype(np.int32)
net = Net()
@ -70,6 +78,7 @@ def test_net_int32():
print(output.asnumpy())
assert (np.all(output.asnumpy() == np.reshape(x, (4, 4))))
def test_net_uint32():
x = np.random.randn(1, 16, 1, 1).astype(np.uint32)
net = Net()
@ -77,6 +86,7 @@ def test_net_uint32():
print(output.asnumpy())
assert (np.all(output.asnumpy() == np.reshape(x, (4, 4))))
def test_net_int64():
x = np.random.randn(1, 16, 1, 1).astype(np.int64)
net = Net()
@ -84,6 +94,7 @@ def test_net_int64():
print(output.asnumpy())
assert (np.all(output.asnumpy() == np.reshape(x, (4, 4))))
def test_net_uint64():
x = np.random.randn(1, 16, 1, 1).astype(np.uint64)
net = Net()
@ -91,6 +102,7 @@ def test_net_uint64():
print(output.asnumpy())
assert (np.all(output.asnumpy() == np.reshape(x, (4, 4))))
def test_net_float16():
x = np.random.randn(1, 16, 1, 1).astype(np.float16)
net = Net()
@ -98,6 +110,7 @@ def test_net_float16():
print(output.asnumpy())
assert (np.all(output.asnumpy() == np.reshape(x, (4, 4))))
def test_net_float32():
x = np.random.randn(1, 16, 1, 1).astype(np.float32)
net = Net()
@ -105,10 +118,10 @@ def test_net_float32():
print(output.asnumpy())
assert (np.all(output.asnumpy() == np.reshape(x, (4, 4))))
def test_net_float64():
x = np.random.randn(1, 16, 1, 1).astype(np.float64)
net = Net()
output = net(Tensor(x))
print(output.asnumpy())
assert (np.all(output.asnumpy() == np.reshape(x, (4, 4))))

View File

@ -17,7 +17,10 @@ from mindspore.ops import operations as P
import mindspore.nn as nn
import numpy as np
import mindspore.context as context
context.set_context(mode=context.PYNATIVE_MODE, device_target="Ascend")
class Net(nn.Cell):
def __init__(self):
super(Net, self).__init__()
@ -34,6 +37,7 @@ def test_net_bool():
print(output.asnumpy())
assert (np.all(output.asnumpy() == x.squeeze()))
def test_net_int8():
x = np.random.randn(1, 16, 1, 1).astype(np.int8)
net = Net()
@ -41,6 +45,7 @@ def test_net_int8():
print(output.asnumpy())
assert (np.all(output.asnumpy() == x.squeeze()))
def test_net_uint8():
x = np.random.randn(1, 16, 1, 1).astype(np.uint8)
net = Net()
@ -48,6 +53,7 @@ def test_net_uint8():
print(output.asnumpy())
assert (np.all(output.asnumpy() == x.squeeze()))
def test_net_int16():
x = np.random.randn(1, 16, 1, 1).astype(np.int16)
net = Net()
@ -55,6 +61,7 @@ def test_net_int16():
print(output.asnumpy())
assert (np.all(output.asnumpy() == x.squeeze()))
def test_net_uint16():
x = np.random.randn(1, 16, 1, 1).astype(np.uint16)
net = Net()
@ -62,6 +69,7 @@ def test_net_uint16():
print(output.asnumpy())
assert (np.all(output.asnumpy() == x.squeeze()))
def test_net_int32():
x = np.random.randn(1, 16, 1, 1).astype(np.int32)
net = Net()
@ -69,6 +77,7 @@ def test_net_int32():
print(output.asnumpy())
assert (np.all(output.asnumpy() == x.squeeze()))
def test_net_uint32():
x = np.random.randn(1, 16, 1, 1).astype(np.uint32)
net = Net()
@ -76,6 +85,7 @@ def test_net_uint32():
print(output.asnumpy())
assert (np.all(output.asnumpy() == x.squeeze()))
def test_net_int64():
x = np.random.randn(1, 16, 1, 1).astype(np.int64)
net = Net()
@ -83,6 +93,7 @@ def test_net_int64():
print(output.asnumpy())
assert (np.all(output.asnumpy() == x.squeeze()))
def test_net_uint64():
x = np.random.randn(1, 16, 1, 1).astype(np.uint64)
net = Net()
@ -90,6 +101,7 @@ def test_net_uint64():
print(output.asnumpy())
assert (np.all(output.asnumpy() == x.squeeze()))
def test_net_float16():
x = np.random.randn(1, 16, 1, 1).astype(np.float16)
net = Net()
@ -97,6 +109,7 @@ def test_net_float16():
print(output.asnumpy())
assert (np.all(output.asnumpy() == x.squeeze()))
def test_net_float32():
x = np.random.randn(1, 16, 1, 1).astype(np.float32)
net = Net()
@ -104,10 +117,10 @@ def test_net_float32():
print(output.asnumpy())
assert (np.all(output.asnumpy() == x.squeeze()))
def test_net_float64():
x = np.random.randn(1, 16, 1, 1).astype(np.float64)
net = Net()
output = net(Tensor(x))
print(output.asnumpy())
assert (np.all(output.asnumpy() == x.squeeze()))

View File

@ -20,7 +20,10 @@ import numpy as np
import mindspore.context as context
from mindspore.common.initializer import initializer
from mindspore.common.parameter import Parameter
context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
class Net(nn.Cell):
def __init__(self):
super(Net, self).__init__()
@ -35,9 +38,11 @@ class Net(nn.Cell):
'normal', [2, 3, 3, 4]), name='gradient')
self.momentum = Parameter(initializer(
'normal', [1, ]), name='momentum')
def construct(self):
return self.apply_momentum(self.variable, self.accumulation, self.learning_rate, self.gradient, self.momentum)
def test_net():
apply_momentum = Net()
output = apply_momentum()

View File

@ -21,7 +21,10 @@ import numpy as np
import mindspore.context as context
from mindspore.common.initializer import initializer
from mindspore.common.parameter import Parameter
context.set_context(device_target="Ascend")
class Net(nn.Cell):
def __init__(self):
super(Net, self).__init__()
@ -29,11 +32,11 @@ class Net(nn.Cell):
# self.dout = Parameter(initializer(
# 'normal', [2, 3, 3, 4]), name='dout')
@ms_function
def construct(self, dout):
return self.bias_add_grad(dout)
dout = np.ones([2, 3, 4, 4]).astype(np.float32)
bias_add_grad = Net()
output = bias_add_grad(Tensor(dout))

View File

@ -21,17 +21,20 @@ import numpy as np
import mindspore.context as context
from mindspore.common.initializer import initializer
from mindspore.common.parameter import Parameter
context.set_context(device_target="Ascend")
class Net(nn.Cell):
def __init__(self):
super(Net, self).__init__()
self.bias_add_grad = G.BiasAddGrad()
@ms_function
def construct(self, dout):
return self.bias_add_grad(dout)
def test_net():
dout = np.random.rand(1, 1001).astype(np.float32)
bias_add_grad = Net()

View File

@ -20,7 +20,10 @@ import numpy as np
import mindspore.context as context
from mindspore.common.initializer import initializer
from mindspore.common.parameter import Parameter
context.set_context(device_target="Ascend")
class Net(nn.Cell):
def __init__(self):
super(Net, self).__init__()
@ -37,13 +40,11 @@ class Net(nn.Cell):
self.w = Parameter(initializer(
'normal', [64, 3, 7, 7]), name='w')
@ms_function
def construct(self, x):
return self.conv(x, self.w)
def test_net():
x = np.random.randn(32, 3, 224, 224).astype(np.float32)
conv = Net()

View File

@ -21,8 +21,10 @@ import numpy as np
import mindspore.context as context
from mindspore.common.initializer import initializer
from mindspore.common.parameter import Parameter
context.set_context(device_target="Ascend")
class Net(nn.Cell):
def __init__(self):
super(Net, self).__init__()
@ -35,6 +37,7 @@ class Net(nn.Cell):
def construct(self, x, out):
return self.conv2d_grad(out, x, self.get_shape(self.y))
x = Tensor(np.array([[[
[3, 0, 1, 2, 7, 4],
[1, 5, 8, 9, 3, 1],

View File

@ -21,8 +21,10 @@ import mindspore.context as context
from mindspore.common.initializer import initializer
from mindspore.common.parameter import Parameter
from mindspore.ops.composite import GradOperation
context.set_context(device_target="Ascend")
class Grad(nn.Cell):
def __init__(self, network):
super(Grad, self).__init__()
@ -33,6 +35,7 @@ class Grad(nn.Cell):
def construct(self, input, output_grad):
return self.grad(self.network)(input, output_grad)
class Net(nn.Cell):
def __init__(self):
super(Net, self).__init__()
@ -53,6 +56,7 @@ class Net(nn.Cell):
def construct(self, x):
return self.conv(x, self.w)
def test_net():
x = np.ones([32, 2048, 7, 7]).astype(np.float32)
sens = np.ones([32, 512, 7, 7]).astype(np.float32)

View File

@ -20,7 +20,10 @@ import numpy as np
import mindspore.context as context
from mindspore.common.initializer import initializer
from mindspore.common.parameter import Parameter
context.set_context(device_target="Ascend")
class Net(nn.Cell):
def __init__(self):
super(Net, self).__init__()
@ -30,6 +33,7 @@ class Net(nn.Cell):
def construct(self, x):
return self.dense(x)
def test_net():
x = np.random.randn(32, 2048).astype(np.float32)
net = Net()

View File

@ -21,8 +21,10 @@ import mindspore.context as context
from mindspore.common.initializer import initializer
from mindspore.common.parameter import Parameter
from mindspore.ops.composite import GradOperation
context.set_context(device_target="Ascend")
class Grad(nn.Cell):
def __init__(self, network):
super(Grad, self).__init__()
@ -33,6 +35,7 @@ class Grad(nn.Cell):
def construct(self, input, output_grad):
return self.grad(self.network)(input, output_grad)
class Net(nn.Cell):
def __init__(self):
super(Net, self).__init__()
@ -41,6 +44,7 @@ class Net(nn.Cell):
def construct(self, x):
return self.dense(x)
def test_net():
x = np.random.randn(32, 2048).astype(np.float32)
sens = np.random.randn(32, 1001).astype(np.float32)

View File

@ -17,6 +17,7 @@ from mindspore.ops import operations as P
import mindspore.nn as nn
import numpy as np
import mindspore.context as context
context.set_context(mode=context.GRAPH_MODE,
device_target="Ascend")

View File

@ -21,6 +21,7 @@ import mindspore.context as context
context.set_context(device_target="Ascend")
class Net(nn.Cell):
def __init__(self):
super(Net, self).__init__()

View File

@ -20,7 +20,10 @@ import numpy as np
import mindspore.context as context
from mindspore.common.initializer import initializer
from mindspore.common.parameter import Parameter
context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
class Net(nn.Cell):
def __init__(self):
super(Net, self).__init__()
@ -45,4 +48,3 @@ def test_net():
print("***********output y*********")
print(output.asnumpy())

View File

@ -21,8 +21,11 @@ import mindspore.context as context
from mindspore.common.initializer import initializer
from mindspore.common.parameter import Parameter
from mindspore.ops.composite import GradOperation
# context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
context.set_context(device_target="Ascend")
class Grad(nn.Cell):
def __init__(self, network):
super(Grad, self).__init__()
@ -33,6 +36,7 @@ class Grad(nn.Cell):
def construct(self, input, output_grad):
return self.grad(self.network)(input, output_grad)
class Net(nn.Cell):
def __init__(self):
super(Net, self).__init__()

View File

@ -20,6 +20,8 @@ from mindspore import Tensor
from mindspore.common.api import ms_function
context.set_context(device_target="Ascend")
class Net(nn.Cell):
def __init__(self):
super(Net, self).__init__()

View File

@ -20,7 +20,10 @@ import numpy as np
import mindspore.context as context
from mindspore.common.initializer import initializer
from mindspore.common.parameter import Parameter
context.set_context(device_target="Ascend")
class Net(nn.Cell):
def __init__(self):
super(Net, self).__init__()
@ -30,9 +33,11 @@ class Net(nn.Cell):
def construct(self, x1, x2):
return self.matmul(x1, x2)
x1 = np.random.randn(1, 3).astype(np.float32)
x2 = np.random.randn(3, 4).astype(np.float32)
def test_net():
matmul = Net()
output = matmul(Tensor(x1), Tensor(x2))

View File

@ -20,12 +20,13 @@ import numpy as np
import mindspore.context as context
context.set_context(device_target="Ascend")
class Net(nn.Cell):
def __init__(self):
super(Net, self).__init__()
self.maxpool = P.MaxPool(pad_mode="SAME", window=3, stride=2)
@ms_function
def construct(self, x):
output = self.maxpool(x)

View File

@ -19,6 +19,7 @@ from mindspore.common.api import ms_function
import numpy as np
import mindspore.context as context
from mindspore.ops.composite import GradOperation
context.set_context(device_target="Ascend")

View File

@ -21,8 +21,10 @@ import mindspore.context as context
from mindspore.common.initializer import initializer
from mindspore.common.parameter import Parameter
from mindspore.ops.composite import GradOperation
context.set_context(device_target="Ascend")
class Grad(nn.Cell):
def __init__(self, network):
super(Grad, self).__init__()
@ -33,6 +35,7 @@ class Grad(nn.Cell):
def construct(self, input, output_grad):
return self.grad(self.network)(input, output_grad)
class Net(nn.Cell):
def __init__(self):
super(Net, self).__init__()
@ -46,6 +49,7 @@ class Net(nn.Cell):
output = self.maxpool(x)
return output[0]
def test_net():
x = np.random.randn(32, 64, 112, 112).astype(np.float32)
sens = np.random.randn(32, 64, 56, 56).astype(np.float32)

View File

@ -20,7 +20,10 @@ import numpy as np
import mindspore.context as context
from mindspore.common.initializer import initializer
from mindspore.common.parameter import Parameter
context.set_context(device_target="Ascend")
class Net(nn.Cell):
def __init__(self):
super(Net, self).__init__()
@ -30,6 +33,7 @@ class Net(nn.Cell):
def construct(self, x):
return self.relu(x)
def test_net():
x = np.random.randn(2, 3, 3, 4).astype(np.float32)
relu = Net()

View File

@ -21,8 +21,10 @@ import mindspore.context as context
from mindspore.common.initializer import initializer
from mindspore.common.parameter import Parameter
from mindspore.ops.composite import GradOperation
context.set_context(device_target="Ascend")
class Grad(nn.Cell):
def __init__(self, network):
super(Grad, self).__init__()
@ -33,6 +35,7 @@ class Grad(nn.Cell):
def construct(self, input, output_grad):
return self.grad(self.network)(input, output_grad)
class Net(nn.Cell):
def __init__(self):
super(Net, self).__init__()
@ -41,6 +44,7 @@ class Net(nn.Cell):
def construct(self, x):
return self.relu(x)
def test_net():
x = np.random.randn(2, 3, 3, 4).astype(np.float32)
sens = np.random.randn(2, 3, 3, 4).astype(np.float32)

View File

@ -18,11 +18,15 @@ import mindspore.nn as nn
from mindspore.common.api import ms_function
import numpy as np
import mindspore.context as context
context.set_context(device_target="Ascend")
class Net(nn.Cell):
def __init__(self):
super(Net, self).__init__()
self.reshape = P.Reshape()
@ms_function
def construct(self, tensor):
return self.reshape(tensor, (1, 16))

View File

@ -20,7 +20,10 @@ import numpy as np
import mindspore.context as context
from mindspore.common.initializer import initializer
from mindspore.common.parameter import Parameter
context.set_context(device_target="Ascend")
class Net(nn.Cell):
def __init__(self):
super(Net, self).__init__()
@ -30,6 +33,7 @@ class Net(nn.Cell):
def construct(self, x):
return self.simplemean(x, (-2, -1))
def test_net():
x = np.random.randn(32, 2048, 7, 7).astype(np.float32)
simplemean = Net()

View File

@ -21,8 +21,10 @@ import mindspore.context as context
from mindspore.common.initializer import initializer
from mindspore.common.parameter import Parameter
from mindspore.ops.composite import GradOperation
context.set_context(device_target="Ascend")
class Grad(nn.Cell):
def __init__(self, network):
super(Grad, self).__init__()
@ -33,6 +35,7 @@ class Grad(nn.Cell):
def construct(self, input, output_grad):
return self.grad(self.network)(input, output_grad)
class Net(nn.Cell):
def __init__(self):
super(Net, self).__init__()
@ -41,6 +44,7 @@ class Net(nn.Cell):
def construct(self, x):
return self.simplemean(x, (-2, -1))
def test_net():
x = np.random.randn(32, 2048, 7, 7).astype(np.float32)
sens = np.random.randn(32, 2048, 1, 1).astype(np.float32)

View File

@ -18,6 +18,7 @@ import mindspore.nn as nn
from mindspore.common.api import ms_function
import numpy as np
import mindspore.context as context
context.set_context(device_target="Ascend")
@ -30,6 +31,7 @@ class Net(nn.Cell):
def construct(self, features, labels):
return self.SparseSoftmaxCrossEntropyWithLogits(features, labels)
def np_sparse_softmax_cross_entropy_with_logits(labels_shape, logits_shape, logits_dtype):
num_class = logits_shape[1]
labels = np.random.randint(low=0, high=num_class - 1, size=labels_shape).astype(np.int32)
@ -65,4 +67,6 @@ def test_net():
print(loss_me.asnumpy().flatten())
print("-------------------------")
print(expect)
test_net()

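The file above prints the operator's loss next to a NumPy reference whose body is elided in this view. For orientation only, a generic sparse softmax cross-entropy reference of the same shape (my sketch, not the file's actual np_sparse_softmax_cross_entropy_with_logits) can be written as:

import numpy as np


def np_sparse_softmax_ce(labels, logits):
    # Numerically stable log-softmax: shift by the row max before exponentiating.
    shifted = logits - logits.max(axis=1, keepdims=True)
    log_probs = shifted - np.log(np.exp(shifted).sum(axis=1, keepdims=True))
    # Mean negative log-likelihood of each row's true class.
    return -log_probs[np.arange(labels.shape[0]), labels].mean()


labels = np.random.randint(0, 10, size=(32,)).astype(np.int32)
logits = np.random.randn(32, 10).astype(np.float32)
expect = np_sparse_softmax_ce(labels, logits)
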
View File

@ -21,6 +21,7 @@ import mindspore.context as context
context.set_context(device_target="Ascend")
class Net(nn.Cell):
def __init__(self, is_grad=False):
super(Net, self).__init__()

View File

@ -20,11 +20,13 @@ import numpy as np
import mindspore.context as context
from mindspore.common.initializer import initializer
from mindspore.common.parameter import Parameter
context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
class Net(nn.Cell):
"""Net definition"""
def __init__(self):
super(Net, self).__init__()
self.AssignAdd = P.AssignAdd()

View File

@ -20,11 +20,13 @@ import numpy as np
import mindspore.context as context
from mindspore.common.initializer import initializer
from mindspore.common.parameter import Parameter
context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
class Net(nn.Cell):
"""Net definition"""
def __init__(self):
super(Net, self).__init__()
self.AssignSub = P.AssignSub()

View File

@ -20,7 +20,10 @@ import numpy as np
import mindspore.context as context
from mindspore.common.initializer import initializer
from mindspore.common.parameter import Parameter
context.set_context(device_target="Ascend")
class Net(nn.Cell):
def __init__(self, keep_dims, axis):
super(Net, self).__init__()
@ -31,8 +34,10 @@ class Net(nn.Cell):
def construct(self, inputs):
return self.reduce_mean(inputs, self.axis)
x1 = np.random.randn(64).astype(np.float32)
def test_net():
keepdims = False
axis = -1

View File

@ -21,6 +21,7 @@ import mindspore.context as context
from mindspore.common.initializer import initializer
from mindspore.common.parameter import Parameter
class Net(nn.Cell):
def __init__(self):
super(Net, self).__init__()
@ -29,6 +30,7 @@ class Net(nn.Cell):
def construct(self, x, y):
return self.add(x, y)
x = np.random.randn(1, 3, 3, 4).astype(np.float32)
y = np.random.randn(1, 3, 3, 4).astype(np.float32)

View File

@ -20,7 +20,10 @@ import numpy as np
import mindspore.context as context
from mindspore.common.initializer import initializer
from mindspore.common.parameter import Parameter
context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
class Net(nn.Cell):
def __init__(self):
super(Net, self).__init__()
@ -29,6 +32,7 @@ class Net(nn.Cell):
def construct(self, x, y):
return self.add((x, y))
def test_net():
x = np.random.randn(1, 3, 3, 4).astype(np.float32)
y = np.random.randn(1, 3, 3, 4).astype(np.float32)

View File

@ -19,6 +19,7 @@ from mindspore.nn import Dense, SoftmaxCrossEntropyWithLogits
from mindspore.nn import TrainOneStepCell, WithLossCell
import mindspore.context as context
context.set_context(mode=context.GRAPH_MODE, device_target="Ascend", impl_type="tbe")
context.set_context(enable_task_sink=True)

View File

@ -21,6 +21,7 @@ import mindspore.context as context
from mindspore.common.initializer import initializer
from mindspore.common.parameter import Parameter
class Net(nn.Cell):
def __init__(self):
super(Net, self).__init__()
@ -35,9 +36,11 @@ class Net(nn.Cell):
'normal', [2, 3, 3, 4]), name='gradient')
self.momentum = Parameter(initializer(
'normal', [1, ]), name='momentum')
def construct(self):
return self.apply_momentum(self.variable, self.accumulation, self.learning_rate, self.gradient, self.momentum)
def test_net():
context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
apply_momentum = Net()

View File

@ -19,8 +19,10 @@ from mindspore.nn import Cell
from mindspore.train.model import Model
import pytest
from mindspore import context
context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
class Net(Cell):
def __init__(self):
super(Net, self).__init__()
@ -30,17 +32,20 @@ class Net(Cell):
x = self.batchmatmul(inputa, inputb)
return x
def tf_me_batchmatmul(inputa, inputb):
net = Net()
net.set_train()
model = Model(net)
out_me = model.predict(Tensor(inputa), Tensor(inputb))
def test_batchmatmul_normal_shape1():
inputa = np.random.randn(128, 16, 128).astype(np.float32)
inputb = np.random.randn(128, 128, 64).astype(np.float32)
tf_me_batchmatmul(Tensor(inputa), Tensor(inputb))
def test_batchmatmul_normal_shape2():
inputa = np.random.randn(1, 16, 128, 128).astype(np.float32)
inputb = np.random.randn(1, 16, 128, 64).astype(np.float32)

View File

@ -21,6 +21,7 @@ import mindspore.context as context
from mindspore.common.initializer import initializer
from mindspore.common.parameter import Parameter
class Net(nn.Cell):
def __init__(self):
super(Net, self).__init__()
@ -55,4 +56,3 @@ def test_net():
print("***********output y*********")
print(output.asnumpy())

View File

@ -21,8 +21,11 @@ import mindspore.context as context
from mindspore.common.initializer import initializer
from mindspore.common.parameter import Parameter
from mindspore.ops.composite import GradOperation
# context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
context.set_context(device_target="Ascend")
class Grad(nn.Cell):
def __init__(self, network):
super(Grad, self).__init__()

View File

@ -20,11 +20,13 @@ import numpy as np
import mindspore.context as context
from mindspore.common.initializer import initializer
from mindspore.common.parameter import Parameter
context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
class Net(nn.Cell):
"""Net definition"""
def __init__(self,
output_channels,
bias_init='zeros',
@ -51,4 +53,3 @@ def test_compile():
# enable it when staging function is ready
output = net(input_data)
print(output.asnumpy())

View File

@ -21,7 +21,10 @@ import numpy as np
import mindspore.context as context
from mindspore.common.initializer import initializer
from mindspore.common.parameter import Parameter
context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
class Net(nn.Cell):
def __init__(self):
super(Net, self).__init__()
@ -31,6 +34,7 @@ class Net(nn.Cell):
def construct(self, dout):
return self.bias_add_grad(dout)
def test_net():
dout = np.random.rand(1, 1001).astype(np.float32)
bias_add_grad = Net()

View File

@ -20,6 +20,7 @@ import numpy as np
import mindspore.context as context
from mindspore.common.initializer import initializer
from mindspore.common.parameter import Parameter
context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")

View File

@ -21,6 +21,7 @@ import mindspore.context as context
from mindspore.common.initializer import initializer
from mindspore.common.parameter import Parameter
class Net(nn.Cell):
def __init__(self):
super(Net, self).__init__()
@ -37,13 +38,11 @@ class Net(nn.Cell):
self.w = Parameter(initializer(
'normal', [64, 3, 7, 7]), name='w')
@ms_function
def construct(self, x):
return self.conv(x, self.w)
def test_net():
x = np.random.randn(32, 3, 224, 224).astype(np.float32)
context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")

View File

@ -21,6 +21,7 @@ import numpy as np
import mindspore.context as context
from mindspore.common.initializer import initializer
from mindspore.common.parameter import Parameter
context.set_context(device_target='Ascend')
@ -37,7 +38,9 @@ class Net(nn.Cell):
stride=1,
dilation=1,
group=1)
self.w = Parameter(initializer(Tensor(np.array([[[[1, 0, -1], [1, 0, -1], [1, 0, -1]]]]).astype(np.float32)), [1, 1, 3, 3]), name='w')
self.w = Parameter(
initializer(Tensor(np.array([[[[1, 0, -1], [1, 0, -1], [1, 0, -1]]]]).astype(np.float32)), [1, 1, 3, 3]),
name='w')
self.x = Parameter(initializer(Tensor(np.array([[[
[3, 0, 1, 2, 7, 4],
[1, 5, 8, 9, 3, 1],

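The hunk above and the one that follows are pure layout fixes for pylint's line-too-long check (C0301): an over-long call is re-wrapped inside its own parentheses, with no change in behavior. A sketch of the pattern on the same Parameter construction (standalone, with the imports the snippet needs):

import numpy as np

from mindspore import Tensor
from mindspore.common.initializer import initializer
from mindspore.common.parameter import Parameter

# Before: the whole construction sat on one line past the length limit.
# After: each argument of the outer call starts on its own continuation line.
w = Parameter(
    initializer(Tensor(np.array([[[[1, 0, -1], [1, 0, -1], [1, 0, -1]]]]).astype(np.float32)), [1, 1, 3, 3]),
    name='w')
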
View File

@ -20,6 +20,7 @@ import numpy as np
import mindspore.context as context
from mindspore.common.initializer import initializer
from mindspore.common.parameter import Parameter
context.set_context(device_target="Ascend")
@ -36,7 +37,9 @@ class Net(nn.Cell):
stride=1,
dilation=1,
group=1)
self.w = Parameter(initializer(Tensor(np.array([[[[1, 0, -1], [1, 0, -1], [1, 0, -1]]]]).astype(np.float32)), [1, 1, 3, 3]), name='w')
self.w = Parameter(
initializer(Tensor(np.array([[[[1, 0, -1], [1, 0, -1], [1, 0, -1]]]]).astype(np.float32)), [1, 1, 3, 3]),
name='w')
self.x = Parameter(initializer(Tensor(np.array([[[
[3, 0, 1, 2, 7, 4],
[1, 5, 8, 9, 3, 1],

View File

@ -20,9 +20,11 @@ import numpy as np
import mindspore.context as context
from mindspore.common.initializer import initializer
from mindspore.common.parameter import Parameter
context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
from mindspore import log as logger
class Net(nn.Cell):
def __init__(self):
super(Net, self).__init__()
@ -48,4 +50,3 @@ def test_net():
logger.info("***********output y*********")
logger.info(output.asnumpy())

View File

@ -21,6 +21,7 @@ import math
import pytest
from mindspore import context
from mindspore import log as logger
context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
@ -52,6 +53,7 @@ def test_gelu_input_dim_0():
with pytest.raises(ValueError):
gelu_forward_cmp(input_shape)
def test_gelu_input_dim_10240_1024():
input_shape = [10240, 1024]
gelu_forward_cmp(input_shape)
@ -96,6 +98,7 @@ def test_gelu_input_dim_128_4096():
input_shape = [128, 4096]
gelu_forward_cmp(input_shape)
@pytest.mark.lower_bs
def test_gelu_input_dim_160_1024():
input_shape = [160, 1024]

View File

@ -25,6 +25,7 @@ from mindspore import log as logger
context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
class Grad(Cell):
def __init__(self, network):
super(Grad, self).__init__()
@ -55,6 +56,7 @@ def gelu_backward_cmp(input_shape):
logger.info("---------me--------")
logger.info(output_grad_me)
# ---------- LARGE INPUT ---------------
class MEGeluLargeIn(Cell):
@ -67,6 +69,7 @@ class MEGeluLargeIn(Cell):
x = self.matmul(x1, x2)
return self.gelu(x)
class GradLargeIn(Cell):
def __init__(self, network):
super(GradLargeIn, self).__init__()

View File

@ -20,8 +20,10 @@ from mindspore.common.tensor import Tensor
from mindspore.train.model import Model
from mindspore import log as logger
from mindspore import context
context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
class Greater(Cell):
def __init__(self):
super(Greater, self).__init__()
@ -30,6 +32,7 @@ class Greater(Cell):
def construct(self, inputa, inputb):
return self.greater(inputa, inputb)
def me_greater(inputa, inputb):
net = Greater()
net.set_train()
@ -42,6 +45,7 @@ def me_greater(inputa, inputb):
logger.info(inputb)
return out.asnumpy()
@pytest.mark.ssd_tbe
def test_greater_2d_scalar0():
a = np.random.randint(-5, 5, [8, 32]).astype(np.int32)

View File

@ -20,8 +20,10 @@ from mindspore.train.model import Model
from mindspore import log as logger
import pytest
from mindspore import context
context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
class Net(Cell):
def __init__(self, input_shape, begin_norm_axis, begin_params_axis, gamma, beta):
super(Net, self).__init__()
@ -31,6 +33,7 @@ class Net(Cell):
x = self.layernorm(input)
return x
def pt_me_layernorm(input_data, normalized_shape, gamma, beta, axis):
net = Net(normalized_shape, begin_norm_axis=axis,
begin_params_axis=axis,
@ -42,6 +45,7 @@ def pt_me_layernorm(input_data, normalized_shape, gamma, beta, axis):
logger.info("Check me result:")
logger.info(out_me.asnumpy())
@pytest.mark.lower_bs
def test_normal_layernorm_1_128_1024_axis_2():
"""

View File

@ -19,8 +19,10 @@ from mindspore.nn import Cell
from mindspore.ops.composite import GradOperation
from mindspore import log as logger
from mindspore import context
context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
class Grad(Cell):
def __init__(self, network):
super(Grad, self).__init__()
@ -31,6 +33,7 @@ class Grad(Cell):
gout = self.grad(self.network)(input, output_grad)
return gout
class Net(Cell):
def __init__(self, input_shape, begin_norm_axis, begin_params_axis, gamma, beta):
super(Net, self).__init__()
@ -40,6 +43,7 @@ class Net(Cell):
x = self.layernorm(input)
return x
def py_me_layernorm_grad(input_data, normalized_shape, gamma, beta, axis, gradients):
input_me = Tensor(input_data)
net_me = Grad(Net(normalized_shape, begin_norm_axis=axis,
@ -52,6 +56,7 @@ def py_me_layernorm_grad(input_data, normalized_shape, gamma, beta, axis, gradie
logger.info("Check me result:")
logger.info(out_grad.asnumpy())
def test_normal_layernorm_grad_normalize_2d():
"""
1 input[1, 128, 1024],normalized_shape=[1024],element_affine=False

View File

@ -18,7 +18,10 @@ import mindspore.nn as nn
from mindspore.common.api import ms_function
import numpy as np
import mindspore.context as context
context.set_context(device_target="Ascend")
class Net(nn.Cell):
def __init__(self):
super(Net, self).__init__()
@ -28,13 +31,14 @@ class Net(nn.Cell):
def construct(self, x1, x2):
return self.less(x1, x2)
x1 = np.random.randn(3, 4).astype(np.float16)
x2 = np.random.randn(3, 4).astype(np.float16)
def test_net():
less = Net()
output = less(Tensor(x1), Tensor(x2))
print(x1)
print(x2)
print(output.asnumpy())

View File

@ -18,7 +18,10 @@ import mindspore.nn as nn
from mindspore.common.api import ms_function
import numpy as np
import mindspore.context as context
context.set_context(device_target="Ascend")
class Net(nn.Cell):
def __init__(self):
super(Net, self).__init__()
@ -28,13 +31,14 @@ class Net(nn.Cell):
def construct(self, x1, x2):
return self.less_equal(x1, x2)
x1 = np.random.randn(3, 4).astype(np.float16)
x2 = np.random.randn(3, 4).astype(np.float16)
def test_net():
less_equal = Net()
output = less_equal(Tensor(x1), Tensor(x2))
print(x1)
print(x2)
print(output.asnumpy())

View File

@ -18,7 +18,10 @@ import mindspore.nn as nn
from mindspore.common.api import ms_function
import numpy as np
import mindspore.context as context
context.set_context(device_target="Ascend")
class Net(nn.Cell):
def __init__(self):
super(Net, self).__init__()
@ -28,12 +31,14 @@ class Net(nn.Cell):
def construct(self, x1, x2):
return self.logical_and(x1, x2)
x1 = [True, True, False, False, True, True, False, False]
x2 = [True, False, False, True, True, False, False, True]
def test_net():
logical_and = Net()
output = logical_and(Tensor(x1), Tensor(x2))
print(x1)
print(x2)
print(output.asnumpy())

View File

@ -18,7 +18,10 @@ import mindspore.nn as nn
from mindspore.common.api import ms_function
import numpy as np
import mindspore.context as context
context.set_context(device_target="Ascend")
class Net(nn.Cell):
def __init__(self):
super(Net, self).__init__()
@ -28,11 +31,12 @@ class Net(nn.Cell):
def construct(self, x1):
return self.logical_not(x1)
x1 = [True, True, False, False, True, True, False, False]
def test_net():
logical_not = Net()
output = logical_not(Tensor(x1))
print(x1)
print(output.asnumpy())

View File

@ -18,7 +18,10 @@ import mindspore.nn as nn
from mindspore.common.api import ms_function
import numpy as np
import mindspore.context as context
context.set_context(device_target="Ascend")
class Net(nn.Cell):
def __init__(self):
super(Net, self).__init__()
@ -28,12 +31,14 @@ class Net(nn.Cell):
def construct(self, x1, x2):
return self.logical_or(x1, x2)
x1 = [True, True, False, False, True, True, False, False]
x2 = [True, False, False, True, True, False, False, True]
def test_net():
logical_or = Net()
output = logical_or(Tensor(x1), Tensor(x2))
print(x1)
print(x2)
print(output.asnumpy())

View File

@ -21,6 +21,7 @@ import mindspore.context as context
from mindspore.common.initializer import initializer
from mindspore.common.parameter import Parameter
class Net(nn.Cell):
def __init__(self):
super(Net, self).__init__()
@ -30,9 +31,11 @@ class Net(nn.Cell):
def construct(self, x1, x2):
return self.matmul(x1, x2)
x1 = np.random.randn(1, 3).astype(np.float32)
x2 = np.random.randn(3, 4).astype(np.float32)
def test_net():
context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
matmul = Net()

View File

@ -20,7 +20,10 @@ import numpy as np
import mindspore.context as context
from mindspore.common.initializer import initializer
from mindspore.common.parameter import Parameter
context.set_context(device_target="Ascend")
class Net(nn.Cell):
def __init__(self):
super(Net, self).__init__()
@ -30,9 +33,11 @@ class Net(nn.Cell):
def construct(self, x1, x2):
return self.matmul(x1, x2)
x1 = np.random.randn(10, 1).astype(np.float32)
x2 = np.random.randn(100, 1).astype(np.float32)
def test_net():
matmul = Net()
output = matmul(Tensor(x1), Tensor(x2))

View File

@ -22,6 +22,7 @@ from mindspore.ops import operations as P
context.set_context(device_target="Ascend")
class Max(nn.Cell):
def __init__(self, dtype):
super(Max, self).__init__()
@ -30,6 +31,7 @@ class Max(nn.Cell):
def construct(self, inputa, inputb):
return self.max(inputa, inputb)
def me_max(inputa, inputb, dtype=ms.float32):
context.set_context(mode=context.GRAPH_MODE)
net = Max(dtype)
@ -44,6 +46,7 @@ def me_max(inputa, inputb, dtype=ms.float32):
print(out)
return out.asnumpy()
def cmp_max(a, b):
out = np.maximum(a, b)
out_ms = me_max(a, b)
@ -51,6 +54,7 @@ def cmp_max(a,b):
print("numpy out :{}".format(out))
print("ms out :{}".format(out_ms))
def test_maximum_2_2():
a = np.random.randn(2, 2).astype(np.float32)
b = np.random.randn(2, 2).astype(np.float32)

View File

@ -22,6 +22,7 @@ from mindspore.ops import operations as P
context.set_context(device_target="Ascend")
grad = C.GradOperation('get_all', get_all=True, sens_param=True)
class MaxNetMe(Cell):
def __init__(self):
super(MaxNetMe, self).__init__()
@ -31,6 +32,7 @@ class MaxNetMe(Cell):
x = self.max(inputA, inputB)
return x
class GradWrap(Cell):
def __init__(self, network):
super(GradWrap, self).__init__()
@ -40,6 +42,7 @@ class GradWrap(Cell):
gout = grad(self.network)(inputA, inputB, sens)
return gout
def gen_data(inputA_np, inputB_np, grad=None):
inputA_me = inputA_np
if isinstance(inputA_np, np.ndarray) == True:
@ -61,6 +64,7 @@ def gen_data(inputA_np, inputB_np, grad=None):
print(output[0].asnumpy())
print(output[1].asnumpy())
def test_net():
inputA_np = np.random.randn(1, 3, 2, 2).astype(np.float32)
inputB_np = np.random.randn(1, 3, 2, 2).astype(np.float32)

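Many of the *_grad tests above and below share one pattern: a small wrapper Cell that applies GradOperation to the network under test, feeding an explicit sensitivity tensor. A minimal self-contained sketch of that pattern, using the same old-style GradOperation signature these files use (assumes an Ascend target; Net here is a stand-in, not code from the diff):

import numpy as np

import mindspore.nn as nn
from mindspore import Tensor, context
from mindspore.ops import composite as C
from mindspore.ops import operations as P

context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")


class Net(nn.Cell):
    def __init__(self):
        super(Net, self).__init__()
        self.relu = P.ReLU()

    def construct(self, x):
        return self.relu(x)


class Grad(nn.Cell):
    def __init__(self, network):
        super(Grad, self).__init__()
        # get_all=True returns a gradient per network input; sens_param=True
        # makes the output gradient an explicit argument of the call.
        self.grad = C.GradOperation('grad', get_all=True, sens_param=True)
        self.network = network

    def construct(self, x, output_grad):
        return self.grad(self.network)(x, output_grad)


def test_relu_grad():
    x = np.random.randn(2, 3, 3, 4).astype(np.float32)
    sens = np.random.randn(2, 3, 3, 4).astype(np.float32)
    net = Grad(Net())
    output = net(Tensor(x), Tensor(sens))
    print(output[0].asnumpy())
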
View File

@ -19,12 +19,12 @@ from mindspore.common.api import ms_function
import numpy as np
import mindspore.context as context
class Net(nn.Cell):
def __init__(self):
super(Net, self).__init__()
self.maxpool = P.MaxPool(padding="SAME", ksize=3, strides=2)
@ms_function
def construct(self, x):
output = self.maxpool(x)

View File

@ -19,6 +19,7 @@ from mindspore.common.api import ms_function
import numpy as np
import mindspore.context as context
from mindspore.ops.composite import GradOperation
context.set_context(device_target="Ascend")

View File

@ -22,7 +22,10 @@ from mindspore.common.initializer import initializer
from mindspore.common.parameter import Parameter
import mindspore as ms
from mindspore.train.model import Model
context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
class Min(nn.Cell):
def __init__(self, dtype):
super(Min, self).__init__()
@ -46,6 +49,7 @@ def me_min(inputa, inputb, dtype=ms.float32):
print(out)
return out.asnumpy()
def cmp_min(a, b):
print(a)
print(b)
@ -55,8 +59,8 @@ def cmp_min(a,b):
out_me = me_min(a, b)
print(out_me)
def test_minimum_2_2():
a = np.random.randn(2, 2, 1, 1).astype(np.float32)
b = np.random.randn(2, 2, 1, 1).astype(np.float32)
cmp_min(a, b)

View File

@ -22,6 +22,8 @@ from mindspore.ops.operations import Minimum
context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
grad = C.GradOperation('get_all', get_all=True, sens_param=True)
class MinNetMe(Cell):
def __init__(self):
super(MinNetMe, self).__init__()
@ -41,6 +43,7 @@ class GradWrap(Cell):
gout = grad(self.network)(inputA, inputB, sens)
return gout
def gen_data(inputA_np, inputB_np, grad=None):
inputA_me = inputA_np
if isinstance(inputA_np, np.ndarray) == True:

View File

@ -18,7 +18,10 @@ import mindspore.nn as nn
from mindspore.common.api import ms_function
import numpy as np
import mindspore.context as context
context.set_context(device_target="Ascend")
class Net(nn.Cell):
def __init__(self):
super(Net, self).__init__()
@ -28,9 +31,11 @@ class Net(nn.Cell):
def construct(self, x1, x2):
return self.mul(x1, x2)
x1 = np.random.randn(3, 4).astype(np.float32)
x2 = np.random.randn(3, 4).astype(np.float32)
def test_net():
mul = Net()
output = mul(Tensor(x1), Tensor(x2))

View File

@ -18,7 +18,10 @@ import mindspore.nn as nn
from mindspore.common.api import ms_function
import numpy as np
import mindspore.context as context
context.set_context(device_target="Ascend")
class Net(nn.Cell):
def __init__(self):
super(Net, self).__init__()
@ -28,8 +31,8 @@ class Net(nn.Cell):
def construct(self):
return self.npu_alloc_float_status()
def test_net():
npu_alloc_float_status = Net()
output = npu_alloc_float_status()
print(output.asnumpy())

View File

@ -18,7 +18,10 @@ import mindspore.nn as nn
from mindspore.common.api import ms_function
import numpy as np
import mindspore.context as context
context.set_context(device_target="Ascend")
class Net(nn.Cell):
def __init__(self):
super(Net, self).__init__()
@ -28,11 +31,12 @@ class Net(nn.Cell):
def construct(self, x1):
return self.npu_clear_float_status(x1)
x1 = np.random.randn(8).astype(np.float32)
def test_net():
npu_clear_float_status = Net()
output = npu_clear_float_status(Tensor(x1))
print(x1)
print(output.asnumpy())

View File

@ -18,7 +18,10 @@ import mindspore.nn as nn
from mindspore.common.api import ms_function
import numpy as np
import mindspore.context as context
context.set_context(device_target="Ascend")
class Net(nn.Cell):
def __init__(self):
super(Net, self).__init__()
@ -28,11 +31,12 @@ class Net(nn.Cell):
def construct(self, x1):
return self.npu_get_float_status(x1)
x1 = np.random.randn(8).astype(np.float32)
def test_net():
npu_get_float_status = Net()
output = npu_get_float_status(Tensor(x1))
print(x1)
print(output.asnumpy())

View File

@ -18,6 +18,7 @@ import mindspore.nn as nn
from mindspore.common.api import ms_function
import numpy as np
import mindspore.context as context
context.set_context(device_target="Ascend")
@ -31,8 +32,10 @@ class Net(nn.Cell):
x = self.pad(x)
return x
x = np.random.random(size=(2, 2)).astype(np.float32)
def test_net():
pad = Net()
output = pad(Tensor(x))

View File

@ -23,8 +23,10 @@ from mindspore.common.initializer import initializer
from mindspore.common.parameter import Parameter
import mindspore as ms
from mindspore.train.model import Model
context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
class PowMe(Cell):
def __init__(self):
super(PowMe, self).__init__()
@ -33,6 +35,7 @@ class PowMe(Cell):
def construct(self, input, exp):
return self.pow(input, exp)
def pow_forward_me_impl(input, exp):
n = PowMe()
n.set_train()
@ -40,6 +43,7 @@ def pow_forward_me_impl(input, exp):
out = m.predict(input, exp)
return out.asnumpy()
def pow_forward_cmp(input_shape, exp_shape):
if len(input_shape) == 0:
input_np = np.absolute(np.random.randn())
@ -60,8 +64,8 @@ def pow_forward_cmp(input_shape, exp_shape):
print(exp_me)
print(out_me)
def test_pow_input_scalar_exp_scalar():
input_shape = []
exp_shape = []
pow_forward_cmp(input_shape, exp_shape)

View File

@ -18,7 +18,10 @@ import mindspore.nn as nn
from mindspore.common.api import ms_function
import numpy as np
import mindspore.context as context
context.set_context(device_target="Ascend")
class Net(nn.Cell):
def __init__(self):
super(Net, self).__init__()
@ -28,9 +31,11 @@ class Net(nn.Cell):
def construct(self, x1, x2):
return self.realdiv(x1, x2)
x1 = np.random.randn(3, 4).astype(np.float32)
x2 = np.random.randn(3, 4).astype(np.float32)
def test_net():
realdiv = Net()
output = realdiv(Tensor(x1), Tensor(x2))

View File

@ -18,7 +18,10 @@ import mindspore.nn as nn
from mindspore.common.api import ms_function
import numpy as np
import mindspore.context as context
context.set_context(device_target="Ascend")
class Net(nn.Cell):
def __init__(self):
super(Net, self).__init__()
@ -28,11 +31,12 @@ class Net(nn.Cell):
def construct(self, x1):
return self.reciprocal(x1)
x1 = np.random.randn(3, 4).astype(np.float32)
def test_net():
reciprocal = Net()
output = reciprocal(Tensor(x1))
print(x1)
print(output.asnumpy())

View File

@ -20,7 +20,10 @@ import numpy as np
import mindspore.context as context
from mindspore.common.initializer import initializer
from mindspore.common.parameter import Parameter
context.set_context(device_target="Ascend")
class Net(nn.Cell):
def __init__(self):
super(Net, self).__init__()
@ -30,6 +33,7 @@ class Net(nn.Cell):
def construct(self, x):
return self.relu(x)
def test_net():
x = np.random.randn(2, 3, 3, 4).astype(np.float32)
relu = Net()

View File

@ -21,8 +21,10 @@ import mindspore.context as context
from mindspore.common.initializer import initializer
from mindspore.common.parameter import Parameter
from mindspore.ops.composite import GradOperation
context.set_context(device_target="Ascend")
class Grad(nn.Cell):
def __init__(self, network):
super(Grad, self).__init__()
@ -33,6 +35,7 @@ class Grad(nn.Cell):
def construct(self, input, output_grad):
return self.grad(self.network)(input, output_grad)
class Net(nn.Cell):
def __init__(self):
super(Net, self).__init__()
@ -41,6 +44,7 @@ class Net(nn.Cell):
def construct(self, x):
return self.relu(x)
def test_net():
x = np.random.randn(2, 3, 3, 4).astype(np.float32)
sens = np.random.randn(2, 3, 3, 4).astype(np.float32)

View File

@ -21,8 +21,10 @@ import mindspore.context as context
from mindspore.common.initializer import initializer
from mindspore.common.parameter import Parameter
from mindspore.ops.composite import GradOperation
context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
class Grad(nn.Cell):
def __init__(self, network):
super(Grad, self).__init__()
@ -33,6 +35,7 @@ class Grad(nn.Cell):
def construct(self, input):
return self.grad(self.network)(input)
class Net(nn.Cell):
def __init__(self):
super(Net, self).__init__()
@ -41,6 +44,7 @@ class Net(nn.Cell):
def construct(self, x):
return self.relu_v2(x)
def test_net():
x = Tensor(np.ones((2, 3, 3, 4)).astype(np.float32))
relu_net = Net()

View File

@ -18,8 +18,10 @@ import mindspore.nn as nn
from mindspore.common.api import ms_function
import numpy as np
import mindspore.context as context
context.set_context(device_target="Ascend")
class Net(nn.Cell):
def __init__(self):
super(Net, self).__init__()
@ -29,6 +31,7 @@ class Net(nn.Cell):
def construct(self, x):
return self.upsample(x)
def test_net():
x = np.random.random(size=(32, 3, 32, 32)).astype(np.float32)
upsample = Net()

View File

@ -19,6 +19,7 @@ from mindspore.ops.composite import GradOperation
from mindspore.common.api import ms_function
import numpy as np
import mindspore.context as context
context.set_context(device_target="Ascend")

View File

@ -20,6 +20,7 @@ import numpy as np
import mindspore.context as context
from mindspore.common.initializer import initializer
from mindspore.common.parameter import Parameter
context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
@ -35,6 +36,7 @@ class Net(nn.Cell):
indices = np.array([[0, 1], [1, 1]]).astype(np.int32)
update = np.array([3.2, 1.1]).astype(np.float32)
def test_net():
scatternd = Net()
print(indices)

View File

@ -23,7 +23,10 @@ from mindspore.common.initializer import initializer
from mindspore.common.parameter import Parameter
import mindspore as ms
from mindspore.train.model import Model
context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
class Select(Cell):
def __init__(self, dtype):
super(Select, self).__init__()
@ -32,6 +35,7 @@ class Select(Cell):
def construct(self, cond, inputa, inputb):
return self.select(cond, inputa, inputb)
def me_select(cond, inputa, inputb, dtype=ms.float32):
net = Select(dtype)
net.set_train()
@ -46,6 +50,7 @@ def me_select(cond, inputa, inputb, dtype=ms.float32):
out = model.predict(Tensor(cond), inputa, inputb)
return out.asnumpy()
def cmp_select(input_cond, inputa, inputb):
cond = input_cond > 0.5
out_me = me_select(cond, inputa, inputb)
@ -55,9 +60,9 @@ def cmp_select(input_cond,inputa,inputb):
print(inputb)
print(out_me)
def test_select_2_2():
input_cond = np.random.rand(2, 2)
inputa = np.random.randn(2, 2).astype(np.float32)
inputb = np.random.randn(2, 2).astype(np.float32)
cmp_select(input_cond, inputa, inputb)

View File

@ -18,8 +18,10 @@ import mindspore.nn as nn
from mindspore.common.api import ms_function
import numpy as np
import mindspore.context as context
context.set_context(device_target="Ascend")
class Net(nn.Cell):
def __init__(self):
super(Net, self).__init__()
@ -29,6 +31,7 @@ class Net(nn.Cell):
def construct(self, x):
return self.sigmoid(x)
def test_net():
x = np.random.random(size=(2, 3)).astype(np.float32)
sigmoid = Net()

View File

@ -21,6 +21,7 @@ import mindspore.context as context
context.set_context(device_target="Ascend")
class Net(nn.Cell):
def __init__(self):
super(Net, self).__init__()

View File

@ -22,6 +22,7 @@ import mindspore.context as context
context.set_context(device_target="Ascend")
class Net(nn.Cell):
def __init__(self):
super(Net, self).__init__()

View File

@ -19,6 +19,7 @@ from mindspore.ops.composite import GradOperation
from mindspore.common.api import ms_function
import numpy as np
import mindspore.context as context
context.set_context(device_target="Ascend")
@ -42,6 +43,7 @@ class Grad(nn.Cell):
def construct(self, x, y):
return self.grad(self.network)(x, y)
def test_net():
x = np.random.random(size=(2, 3, 4, 5, 6)).astype(np.float32)
y = np.random.random(size=(2, 3, 4, 5, 6)).astype(np.float32)
@ -49,4 +51,3 @@ def test_net():
output = net(Tensor(x), Tensor(y))
print("=================output====================")
print(output.asnumpy())

View File

@ -20,6 +20,7 @@ import numpy as np
import mindspore.context as context
from mindspore.common.initializer import initializer
from mindspore.common.parameter import Parameter
context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
@ -29,7 +30,8 @@ class Slice(nn.Cell):
self.cat = P.Slice()
self.x1 = Parameter(initializer(
Tensor(np.array([[[1, -1, 1], [2, -2, 2]], [[3, -3, 3], [4, -4, 4]], [[5, -5, 5], [6, -6, 6]]]).astype(np.float32)), [3,2,3]), name='x1')
Tensor(np.array([[[1, -1, 1], [2, -2, 2]], [[3, -3, 3], [4, -4, 4]], [[5, -5, 5], [6, -6, 6]]]).astype(
np.float32)), [3, 2, 3]), name='x1')
@ms_function
def construct(self):

View File

@ -18,6 +18,7 @@ import mindspore.nn as nn
import mindspore.context as context
from mindspore import Tensor
from mindspore.ops import operations as P
context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")

View File

@ -31,6 +31,7 @@ class Net(nn.Cell):
def construct(self, pred, gt):
return self.SmoothL1Loss(pred, gt)
class Grad(nn.Cell):
def __init__(self, network):
super(Grad, self).__init__()

View File

@ -20,7 +20,10 @@ import numpy as np
import mindspore.context as context
from mindspore.common.initializer import initializer
from mindspore.common.parameter import Parameter
context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
class Net(nn.Cell):
def __init__(self):
super(Net, self).__init__()
@ -29,8 +32,10 @@ class Net(nn.Cell):
def construct(self, x):
return self.Softmax(x)
x = np.array([[5, 1]]).astype(np.float32)
def test_net():
softmax = Net()
output = softmax(Tensor(x))

View File

@ -18,6 +18,7 @@ import mindspore.nn as nn
from mindspore.common.api import ms_function
import numpy as np
import mindspore.context as context
context.set_context(device_target="Ascend")

View File

@ -20,7 +20,10 @@ import numpy as np
import mindspore.context as context
from mindspore.common.initializer import initializer
from mindspore.common.parameter import Parameter
context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
class Net(nn.Cell):
def __init__(self):
super(Net, self).__init__()
@ -29,6 +32,7 @@ class Net(nn.Cell):
def construct(self, x):
return self.split(x)
x = np.random.randn(2, 4).astype(np.float32)

View File

@ -20,7 +20,10 @@ import numpy as np
import mindspore.context as context
from mindspore.common.initializer import initializer
from mindspore.common.parameter import Parameter
context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
class Net(nn.Cell):
def __init__(self):
super(Net, self).__init__()
@ -29,8 +32,10 @@ class Net(nn.Cell):
def construct(self, x):
return self.sqrt(x)
x = np.array([1.0, 4.0, 9.0]).astype(np.float32)
def test_net():
sqrt = Net()
output = sqrt(Tensor(x))

View File

@ -20,7 +20,10 @@ import numpy as np
import mindspore.context as context
from mindspore.common.initializer import initializer
from mindspore.common.parameter import Parameter
context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
class Net(nn.Cell):
def __init__(self):
super(Net, self).__init__()
@ -29,8 +32,10 @@ class Net(nn.Cell):
def construct(self, x):
return self.square(x)
x = np.array([1.0, 4.0, 9.0]).astype(np.float32)
def test_net():
square = Net()
output = square(Tensor(x))

View File

@ -19,7 +19,10 @@ from mindspore.nn import Cell
from mindspore.train.model import Model
import pytest
import mindspore.context as context
context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
class Net(Cell):
def __init__(self, begin, end, stride):
super(Net, self).__init__()
@ -32,6 +35,7 @@ class Net(Cell):
x = self.stridedslice(input, self.begin, self.end, self.stride)
return x
def me_stridedslice(input1, begin, end, stride):
input_me = Tensor(input1)
net = Net(begin, end, stride)
@ -40,6 +44,7 @@ def me_stridedslice(input1, begin, end, stride):
output = model.predict(input_me)
print(output.asnumpy())
def test_stridedslice_input_2d():
input = np.random.randn(5, 5).astype(np.int32)
begin = (0, 0)
@ -48,6 +53,7 @@ def test_stridedslice_input_2d():
me_stridedslice(input, begin, end, stride)
def test_stridedslice_input_3d():
input = np.random.randn(5, 5, 5).astype(np.float32)
begin = (0, 0, 0)

View File

@ -19,8 +19,10 @@ from mindspore.nn import Cell
from mindspore.ops.composite import GradOperation
from mindspore import context
import pytest
context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
class Grad(Cell):
def __init__(self, network):
super(Grad, self).__init__()
@ -31,6 +33,7 @@ class Grad(Cell):
gout = self.grad(self.network)(input, output_grad)
return gout
class Net(Cell):
def __init__(self, begin, end, stride):
super(Net, self).__init__()
@ -43,6 +46,7 @@ class Net(Cell):
x = self.stridedslice(input, self.begin, self.end, self.stride)
return x
def me_stridedslice(input, begin, end, stride, gradients):
input_me = Tensor(input)
out_grad_me = Tensor(gradients)
@ -51,6 +55,7 @@ def me_stridedslice(input, begin, end, stride, gradients):
out_grad = net_me(input_me, out_grad_me)
print(out_grad.asnumpy())
def test_grad_stridedslice_1d():
input = np.random.randn(2).astype(np.float32)
begin = (0,)

View File

@ -20,7 +20,10 @@ import numpy as np
import mindspore.context as context
from mindspore.common.initializer import initializer
from mindspore.common.parameter import Parameter
context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
class Net(nn.Cell):
def __init__(self):
super(Net, self).__init__()
@ -29,6 +32,7 @@ class Net(nn.Cell):
def construct(self, x, y):
return self.sub(x, y)
x = np.random.randn(1, 3, 3, 4).astype(np.float32)
y = np.random.randn(1, 3, 3, 4).astype(np.float32)

View File

@ -21,6 +21,7 @@ from mindspore.ops import operations as P
context.set_context(device_target="Ascend")
class Net(nn.Cell):
def __init__(self):
super(Net, self).__init__()
@ -29,9 +30,12 @@ class Net(nn.Cell):
def construct(self, x):
return self.tanh(x)
input_shape = [1]
input_np = np.random.randn(*input_shape).astype(np.float32)
input_me = Tensor(input_np)
def test_net():
context.set_context(mode=context.GRAPH_MODE)
tanh = Net()

View File

@ -22,6 +22,7 @@ from mindspore.ops.operations import _grad_ops as G
context.set_context(device_target="Ascend")
class Net(nn.Cell):
def __init__(self):
super(Net, self).__init__()
@ -30,9 +31,12 @@ class Net(nn.Cell):
def construct(self, y, dy):
return self.tanh_grad(y, dy)
input_shape = [1]
input_np = np.random.randn(*input_shape).astype(np.float32)
input_me = Tensor(input_np)
def test_net():
context.set_context(mode=context.GRAPH_MODE)
tanh_grad = Net()

View File

@ -20,6 +20,7 @@ import numpy as np
import mindspore.context as context
from mindspore.common.initializer import initializer
from mindspore.common.parameter import Parameter
context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")

View File

@ -20,7 +20,10 @@ import numpy as np
import mindspore.context as context
from mindspore.common.initializer import initializer
from mindspore.common.parameter import Parameter
context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
class Net(nn.Cell):
def __init__(self, k):
super(Net, self).__init__()
@ -41,4 +44,3 @@ def test_net():
print("***********output y*********")
print(output[0].asnumpy())

Some files were not shown because too many files have changed in this diff.