forked from mindspore-Ecosystem/mindspore

modify conv2dtranspose

parent 697e8d30ca
commit 88e763a98f
@@ -37,7 +37,8 @@ class _Conv(Cell):
                  group,
                  has_bias,
                  weight_init,
-                 bias_init):
+                 bias_init,
+                 transposed=False):
         super(_Conv, self).__init__()
         self.in_channels = check_int_positive(in_channels)
         self.out_channels = check_int_positive(out_channels)
@@ -65,9 +66,11 @@ class _Conv(Cell):
         if out_channels % group != 0:
             raise ValueError("Attr 'out_channels' of 'Conv2D' Op must be divisible by "
                              "attr 'group' of 'Conv2D' Op.")
-
-        self.weight = Parameter(initializer(weight_init, [out_channels, in_channels // group, *kernel_size]),
-                                name='weight')
+        if transposed:
+            shape = [in_channels, out_channels // group, *kernel_size]
+        else:
+            shape = [out_channels, in_channels // group, *kernel_size]
+        self.weight = Parameter(initializer(weight_init, shape), name='weight')
 
         if check_bool(has_bias):
             self.bias = Parameter(initializer(bias_init, [out_channels]), name='bias')
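The net effect of the two hunks above is that _Conv now decides the weight layout itself through the new transposed flag, instead of relying on callers to swap the channel arguments. A minimal sketch of that shape rule, using _conv_weight_shape as a hypothetical stand-in for the branch inside _Conv.__init__ (plain Python, no MindSpore needed):

def _conv_weight_shape(in_channels, out_channels, kernel_size, group=1, transposed=False):
    # Mirrors the branch added to _Conv.__init__ in this commit.
    if transposed:
        return [in_channels, out_channels // group, *kernel_size]
    return [out_channels, in_channels // group, *kernel_size]

# Conv2d keeps the old layout; Conv2dTranspose now gets the swapped one.
assert _conv_weight_shape(3, 64, (4, 4)) == [64, 3, 4, 4]
assert _conv_weight_shape(3, 64, (4, 4), transposed=True) == [3, 64, 4, 4]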
@@ -312,8 +315,8 @@ class Conv2dTranspose(_Conv):
         # cause Conv2DBackpropInput's out_channel refers to Conv2D's out_channel,
         # then Conv2dTranspose's out_channel refers to Conv2DBackpropInput's in_channel.
         super(Conv2dTranspose, self).__init__(
-            out_channels,
             in_channels,
+            out_channels,
             kernel_size,
             stride,
             pad_mode,
@@ -322,10 +325,11 @@ class Conv2dTranspose(_Conv):
             group,
             has_bias,
             weight_init,
-            bias_init)
+            bias_init,
+            transposed=True)
 
-        self.out_channels = out_channels
         self.in_channels = in_channels
+        self.out_channels = out_channels
         self.shape = P.Shape()
         if pad_mode not in ('valid', 'same', 'pad'):
             raise ValueError('Attr \'pad_mode\' of \'Conv2dTranspose\' Op passed '
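To make the Conv2DBackpropInput comment concrete, here is the channel bookkeeping worked through for one illustrative configuration; only the shape rules come from the diff, the concrete numbers are assumptions:

# Conv2dTranspose(in_channels=3, out_channels=64) acts as the input gradient of a
# forward Conv2d with the channels swapped: Conv2d(in_channels=64, out_channels=3).
in_channels, out_channels, group = 3, 64, 1
kernel_size = (4, 4)
fwd_in, fwd_out = out_channels, in_channels  # channels of that forward Conv2d

# Forward Conv2d weight layout (the non-transposed branch):
fwd_weight = [fwd_out, fwd_in // group, *kernel_size]                   # [3, 64, 4, 4]
# Conv2dTranspose weight layout (the transposed=True branch):
transposed_weight = [in_channels, out_channels // group, *kernel_size]  # [3, 64, 4, 4]

# The layouts coincide, which is why the old code swapped the first two arguments
# of super().__init__ while the new code passes them in order and sets transposed=True.
assert fwd_weight == transposed_weight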
The remaining hunks update the Conv2d / Conv2dTranspose unit tests.
@@ -20,7 +20,6 @@ import mindspore.nn as nn
 from mindspore import Tensor
 from ..ut_filter import non_graph_engine
 
-
 weight = Tensor(np.ones([2, 2]))
 in_channels = 3
 out_channels = 64
@@ -28,6 +27,7 @@ out_channels = 64
+
 class Net(nn.Cell):
     """ Net definition """
 
     def __init__(self,
                  cin,
                  cout,
@@ -93,12 +93,14 @@ def test_compile_pad_pad():
     input_data = Tensor(np.ones([1, 3, 16, 50], dtype=np.float32))
     net(input_data)
 
+
 def test_conv_group_error():
     with pytest.raises(ValueError):
         nn.Conv2d(6, 8, 3, group=3)
     with pytest.raises(ValueError):
         nn.Conv2d(6, 9, 3, group=2)
 
+
 def test_conv_check():
     """ test_conv_check """
     with pytest.raises(ValueError):
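The two raising cases in test_conv_group_error above line up with the out_channels % group check shown in the _Conv hunk earlier; a quick sanity check of the arithmetic (plain Python):

# nn.Conv2d(6, 8, 3, group=3): out_channels=8 is not divisible by group=3 -> ValueError
assert 8 % 3 != 0
# nn.Conv2d(6, 9, 3, group=2): out_channels=9 is not divisible by group=2 -> ValueError
assert 9 % 2 != 0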
@@ -165,6 +167,13 @@ def test_compile_transpose_bias():
     net(input_data)
 
 
+def test_compile_transpose_bias_init():
+    bias = Tensor(np.random.randn(64).astype(np.float32))
+    net = NetConv2dTranspose(3, 64, 4, has_bias=True, weight_init='normal', bias_init=bias)
+    input_data = Tensor(np.ones([1, 3, 16, 50], dtype=np.float32))
+    net(input_data)
+
+
 def test_compile_transpose_valid():
     net = NetConv2dTranspose(3, 64, 4, pad_mode='valid', weight_init='normal')
     input_data = Tensor(np.ones([1, 3, 16, 50], dtype=np.float32))