forked from mindspore-Ecosystem/mindspore
Fix pylint warning.
parent 831ceba6eb
commit c6d8a4dc98
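For orientation, a minimal sketch (not part of this commit; the function and variable names are invented) of the kinds of pylint findings the hunks below address:

def before(x, y):        # W0613 unused-argument: 'y' is never used
    sum = x + 1          # W0622 redefined-builtin: 'sum' shadows the builtin
    if sum == None:      # C0121 singleton-comparison: compare with 'is None'
        return x
    else:                # R1705 no-else-return: the 'else' is redundant
        return sum


def after(x):
    total = x + 1
    if total is None:
        return x
    return total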
@@ -32,7 +32,7 @@ def rec2():
     return rec1()


-def test_keep_roots_recursion(x, y):
+def test_keep_roots_recursion():
     return rec1() + nonrec()

@@ -48,8 +48,6 @@ loss = nn.MSELoss()


 def test_build():
-    input_data = Tensor(np.random.randint(0, 255, [1, 3, 224, 224]))
-    input_label = Tensor(np.random.randint(0, 10, [1, 10]))
     net = Net()
     opt = Momentum(net.get_parameters(), learning_rate=0.1, momentum=0.9)
-    model = Model(net, loss_fn=loss, optimizer=opt, metrics=None)
+    Model(net, loss_fn=loss, optimizer=opt, metrics=None)
@@ -35,16 +35,18 @@ log.setLevel(level=logging.ERROR)
 relu_test = Primitive('relu_test')


-def test_ops_f1(x, y):
-    foo = relu_test(x)
-    return foo
+def test_ops_f1(x):
+    test = relu_test(x)
+    return test


 # use method2: create instance outside function use an operator with parameters
 class Conv_test(Primitive):
     @prim_attr_register
     def __init__(self, stride=0, pad=1):
-        print('in conv_test init', self.stride)
         self.stride = stride
         self.pad = pad
+        print('in conv_test init', self.stride, self.pad)

+    def __call__(self, x=0, y=1, z=2):
+        pass
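The "method2" comment in the hunk above refers to creating the parameterized operator instance once, outside the function that uses it, so its attributes are fixed at construction time. A minimal sketch of that pattern (assumed usage, not part of the commit; it presumes the Conv_test primitive defined above and mirrors the self.conv = Conv_test(3, 5) call in the ResNet hunk below):

# Instance created outside the function, with stride/pad bound up front.
conv = Conv_test(stride=3, pad=5)

def apply_conv(x):
    # The call itself only receives the runtime inputs.
    return conv(x)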
@@ -65,7 +67,7 @@ class ResNet(nn.Cell):
         self.weight = Parameter(tensor, name="weight")
         self.conv = Conv_test(3, 5)

-    def construct(self, x, y, train="train"):
+    def construct(self, x, y):
         return x + y * self.weight + self.conv(x)

     def get_params(self):
@@ -78,7 +80,7 @@ class SimpleNet(nn.Cell):
         self.weight = Parameter(tensor, name="weight")
         self.network = network

-    def construct(self, x, y, train="train"):
+    def construct(self, x, y):
         return self.network(x) + self.weight * y

     def get_params(self):
@@ -106,7 +108,7 @@ class SimpleNet_1(nn.Cell):
         super(SimpleNet_1, self).__init__()
         self.conv = Conv_test(2, 3)

-    def construct(self, x, y, train="train"):
+    def construct(self, x, y):
         return self.conv(x, y)

     def get_params(self):
@@ -15,9 +15,8 @@
 """
 file: parser_integrate.py
 """
-import mindspore._c_expression as me
 import numpy as np

+import mindspore._c_expression as me
 import mindspore.nn as nn
 from mindspore.common import dtype
 from mindspore.common.api import ms_function, _executor
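A hedged aside on the import hunk above: pylint's C0411 (wrong-import-order) and C0412 (ungrouped-imports) checks expect standard-library imports first, then third-party, then first-party, with a blank line between groups. An invented illustration of that layout:

import logging               # standard library

import numpy as np           # third-party

import mindspore.nn as nn    # first-party within this repository
from mindspore.common import dtype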
@@ -110,9 +109,9 @@ def test_tensor_add():
     Y.set_dtype(dtype.float32)
     X = me.tensor(np.ones([2, 3]))
     Y = me.tensor(np.ones([2, 3]))
-    sum = add(X, Y)
+    tensor_add = add(X, Y)
     print("test tensor add")
-    return sum
+    return tensor_add


 def loss_func(x, y):
@@ -129,7 +128,7 @@ def test_resetnet50_build():
     X.set_dtype(dtype.float32)
     Y.set_dtype(dtype.float32)
     network = resnet50()
-    model = Model(network=network, loss_fn=loss_func, optimizer=optimizer)
+    Model(network=network, loss_fn=loss_func, optimizer=optimizer)


 class Net(nn.Cell):
@@ -146,20 +145,20 @@ class TestNet(nn.Cell):
         super(TestNet, self).__init__()
         self.param = Parameter(Tensor([1, 3, 16, 50]), "param")

-    def construct(self, input):
-        self.param = self.param + input
+    def construct(self, inputs):
+        self.param = self.param + inputs
         return self.param


 def test_compile_conv2d():
     net = Net()
-    input = Tensor(np.ones([1, 3, 16, 50]).astype(np.float32))
-    _executor.compile(net, input)
+    inputs = Tensor(np.ones([1, 3, 16, 50]).astype(np.float32))
+    _executor.compile(net, inputs)


 def test_none(x, y):
     def func(x, y):
-        if y == None:
+        if y is None:
             return x
         return x + y

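A short aside (not from the commit) on why the y == None comparison above is flagged: __eq__ can be overridden, so equality with None is not a reliable identity test, while "is" always compares against the None singleton:

class AlwaysEqual:
    def __eq__(self, other):
        return True          # claims equality with everything, even None

obj = AlwaysEqual()
print(obj == None)           # True, despite obj not being None
print(obj is None)           # False, the identity check is unambiguous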
@@ -176,20 +176,19 @@ def test_funcdef(x, y):
     def mymax(a, b):
         if a > b:
             return a
-        else:
-            return b
+        return b

     t = mymax(x, y)
     return t


-def test_tuple_fn(x, y):
+def test_tuple_fn(y):
     l = (1, 2, 3, 5, 7)
     l = l + l[y]
     return l


-def test_list_fn(x, y):
+def test_list_fn(y):
     l = [1, 2, 3, 5, 7]
     l = l + l[y]
     return l
@@ -265,7 +264,7 @@ def test_simple_closure(a, b):
     return f() * g()


-def test_assign_tuple(x, y):
+def test_assign_tuple():
     a = 1
     b = 2
     t = a, b