From 8f7115fda355bbd78c30e8352ae11a0e0f449e83 Mon Sep 17 00:00:00 2001 From: Shufan Date: Tue, 5 Jun 2018 12:21:22 +0800 Subject: [PATCH 01/12] Rebase to align the latest changes on test_gluon.py --- tests/python/unittest/test_gluon.py | 1047 +++++++++++++++++++++++++++ 1 file changed, 1047 insertions(+) diff --git a/tests/python/unittest/test_gluon.py b/tests/python/unittest/test_gluon.py index bf1e0deb200b..57ef72985a66 100644 --- a/tests/python/unittest/test_gluon.py +++ b/tests/python/unittest/test_gluon.py @@ -1237,6 +1237,1053 @@ def test_summary(): net.hybridize() assert_raises(AssertionError, net.summary, mx.nd.ones((32, 3, 224, 224))) +def check_layer_forward_withinput(net, x): + x_hybrid = x.copy() + x.attach_grad() + x_hybrid.attach_grad() + net.collect_params().initialize() + with mx.autograd.record(): + out1 = net(x) + out1.backward() + net.hybridize() + with mx.autograd.record(): + out2 = net(x_hybrid) + out2.backward() + mx.test_utils.assert_almost_equal(x.grad.asnumpy(), x_hybrid.grad.asnumpy(), rtol=1e-5, atol=1e-6) + mx.test_utils.assert_almost_equal(out1.asnumpy(), out2.asnumpy(), rtol=1e-5, atol=1e-6) + +@with_seed() +def test_conv2d_16c(): + chn_list = [16, 32, 64, 128, 256, 512, 1024] + kernel_list = [1, 3, 5, 7, 11] + kernel_list.append(224) + batch_size = 32 + class Net(gluon.HybridBlock): + def __init__(self, + chn_num, + kernel, + **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + self.conv0 = gluon.nn.Conv2D(chn_num, (kernel, kernel)) + + def hybrid_forward(self, F, x): + out = self.conv0(x) + return out + + x = mx.nd.random.uniform(-1.0, 1.0, shape=(batch_size, 3, 224, 224)) + for i in range(len(chn_list)): + for j in range(len(kernel_list)): + net = Net(chn_list[i], kernel_list[j]) + check_layer_forward_withinput(net, x) + + +@with_seed() +def test_group_conv2d_16c(): + grp_list = [16, 32, 64, 128, 256, 512, 1024] + input_size_list = np.random.randint(low=1, high=225, size=10).tolist() + kernel_list = [1, 3] + batch_size = 32 + class Net(gluon.HybridBlock): + def __init__(self, + chn_num, + kernel, + **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + self.conv0 = gluon.nn.Conv2D(chn_num, (1, 1)) + self.conv1 = gluon.nn.Conv2D(chn_num, (kernel, kernel), groups=chn_num) + + def hybrid_forward(self, F, x): + y = self.conv0(x) + out = self.conv1(y) + return out + + for i in range(len(input_size_list)): + x = mx.nd.random.uniform(-1.0, 1.0, shape=(batch_size, 3, input_size_list[i], input_size_list[i])) + for j in range(len(grp_list)): + for k in range(len(kernel_list)): + net = Net(grp_list[j], kernel_list[k]) + check_layer_forward_withinput(net, x) + + +@with_seed() +@unittest.skip('skippping temporarily, tracked by MXNET-519') +def test_deconv2d_16c(): + in_chn_list = [1024, 512, 256, 128, 64, 32, 16] + out_chn_list = [512, 256, 128, 64, 32, 16, 3] + kernel_list = [1, 3, 5, 7] + in_shape = [4, 8, 16, 32, 64, 224] + batch_size = 32 + class Net(gluon.HybridBlock): + def __init__(self, chn_num, kernel, **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + self.deconv0 = gluon.nn.Conv2DTranspose(chn_num, (kernel, kernel)) + + def hybrid_forward(self, F, x): + out = self.deconv0(x) + return out + for i in range(len(in_shape)): + x = mx.nd.random.uniform(-1.0, 1.0, shape=(batch_size, in_chn_list[i], in_shape[i], in_shape[i])) + for j in range(len(kernel_list)): + net = Net(out_chn_list[i], kernel_list[j]) + check_layer_forward_withinput(net, x) + + +@with_seed() +@unittest.skip('skippping 
temporarily, tracked by MXNET-519') +def test_batchnorm_16c(): + chn_list = [16, 32, 64, 128, 256, 512, 1024] + shape = np.random.randint(low=1, high=300, size=10) + shape_list = [] + for i in range(len(shape)): + shape_list.append((shape[i], shape[i])) + batch_size = 32 + class Net(gluon.HybridBlock): + def __init__(self, + chn_num, + kernel, + axis, + **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + self.conv0 = gluon.nn.Conv2D(chn_num, (kernel, kernel)) + self.bn0 = gluon.nn.BatchNorm(axis=axis) + + def hybrid_forward(self, F, x): + conv = self.conv0(x) + out = self.bn0(conv) + return out + + for i in range(len(chn_list)): + for j in range(len(shape_list)): + shape = (batch_size, ) + (3,) + shape_list[j] + x = mx.nd.random.uniform(-1.0, 1.0, shape=shape) + net = Net(chn_list[i], 1, 1) + check_layer_forward_withinput(net, x) + + +@with_seed() +def test_concat(): + chn_list = [16, 32, 64, 128, 256, 512, 1024] + shapes = [224, 64, 32, 27, 16, 7, 3] + input_num = np.random.randint(low=2, high=11) + shape_list = [] + for i in range(len(shapes)): + shape_list.append((shapes[i], shapes[i])) + batch_size = 32 + class Net(gluon.HybridBlock): + def __init__(self, + check_dim, + input_num, + chn_num, + kernel, + **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + from mxnet.gluon.contrib.nn import HybridConcurrent + self.concat = HybridConcurrent(axis=check_dim) + for i in range(input_num): + self.concat.add(gluon.nn.Conv2D(chn_num, (kernel, kernel))) + + def hybrid_forward(self, F, x): + return self.concat(x) + + for i in range(len(chn_list)): + shape = (batch_size,) + (3,) + shape_list[i] + x = mx.nd.random.uniform(-1.0, 1.0, shape=shape) + for axis in range(4): + net = Net(axis, input_num, chn_list[i], 1) + check_layer_forward_withinput(net, x) + + +@with_seed() +def test_reshape_conv(): + class Net(gluon.HybridBlock): + def __init__(self, **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + self.conv0 = nn.Conv2D(64, (3, 3)) + + def hybrid_forward(self, F, x): + x_reshape = x.reshape((0, 0, 448, 112)) + out = self.conv0(x_reshape) + return out + x = mx.nd.random.uniform(shape=(32, 3, 224, 224)) + net = Net() + check_layer_forward_withinput(net, x) + + +@with_seed() +@unittest.skip('skippping temporarily, tracked by MXNET-519') +def test_reshape_conv_reshape_conv(): + class Net(gluon.HybridBlock): + def __init__(self, **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + self.conv0 = nn.Conv2D(64, (3, 3)) + self.conv1 = nn.Conv2D(256, (3, 3)) + + def hybrid_forward(self, F, x): + x_reshape = x.reshape((0, 0, 448, 112)) + y = self.conv0(x_reshape) + y_reshape = y.reshape((0, 0, 223, 220)) + out = self.conv1(y_reshape) + return out + x = mx.nd.random.uniform(shape=(32, 3, 224, 224)) + net = Net() + check_layer_forward_withinput(net, x) + + +@with_seed() +def test_slice_conv(): + class Net(gluon.HybridBlock): + def __init__(self, **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + self.conv0 = nn.Conv2D(64, (3, 3)) + + def hybrid_forward(self, F, x): + x_slice = x.slice(begin=(0, 2, 0, 0), end=(32, 5, 224, 224)) + out = self.conv0(x_slice) + return out + x = mx.nd.random.uniform(shape=(32, 6, 224, 224)) + net = Net() + check_layer_forward_withinput(net, x) + + +@with_seed() +def test_slice_conv_slice_conv(): + class Net(gluon.HybridBlock): + def __init__(self, **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + self.conv0 = nn.Conv2D(64, (3, 3)) + 
self.conv1 = nn.Conv2D(256, (3, 3)) + + def hybrid_forward(self, F, x): + x_slice = x.slice(begin=(0, 2, 0, 0), end=(32, 5, 224, 224)) + y = self.conv0(x_slice) + y_slice = y.slice(begin=(0, 32, 0, 0), end=(32, 64, 222, 222)) + out = self.conv1(y_slice) + return out + x = mx.nd.random.uniform(shape=(32, 6, 224, 224)) + net = Net() + check_layer_forward_withinput(net, x) + + +@with_seed() +@unittest.skip('skippping temporarily, tracked by MXNET-519') +def test_slice_conv_reshape_conv(): + class Net(gluon.HybridBlock): + def __init__(self, **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + self.conv0 = nn.Conv2D(64, (3, 3)) + self.conv1 = nn.Conv2D(256, (3, 3)) + + def hybrid_forward(self, F, x): + x_slice = x.slice(begin=(0, 0, 1, 1), end=(32, 3, 225, 225)) + y = self.conv0(x_slice) + y_reshape = y.reshape((0, 0, 444, 111)) + out = self.conv1(y_reshape) + return out + + x = mx.nd.random.uniform(shape=(32, 3, 299, 299)) + net = Net() + check_layer_forward_withinput(net, x) + +@with_seed() +def test_reshape_conv_slice_conv(): + """ + This test will test gluon Conv2d computation with ndarray reshape and slice + """ + class Net(gluon.HybridBlock): + def __init__(self, **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + self.conv0 = nn.Conv2D(64, (3, 3)) + self.conv1 = nn.Conv2D(256, (3, 3)) + + def hybrid_forward(self, F, x): + x_reshape = x.reshape((0, 0, 448, 112)) + y = self.conv0(x_reshape) + y_slice = y.slice(begin=(0, 32, 0, 0), end=(32, 64, 446, 110)) + out = self.conv1(y_slice) + return out + x = mx.nd.random.uniform(shape=(32, 6, 224, 224)) + net = Net() + check_layer_forward_withinput(net, x) + + +@with_seed() +def test_reshape_dense(): + class Net(gluon.HybridBlock): + def __init__(self, **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + channel0 = np.random.randint(1, 129) + self.dense0 = nn.Dense(channel0) + + def hybrid_forward(self, F, x): + x_reshape = x.reshape((8, 64, 600, -1)) + out = self.dense0(x_reshape) + return out + + x = mx.nd.random.uniform(shape=(16, 128, 300, 300)) + net = Net() + check_layer_forward_withinput(net, x) + + +@with_seed() +def test_slice_dense(): + class Net(gluon.HybridBlock): + def __init__(self, slice, **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + channel0 = np.random.randint(1, 129) + self.dense0 = nn.Dense(channel0) + self.slice = slice + + def hybrid_forward(self, F, x): + x_slice = x.slice(begin=tuple(self.slice[0]), + end=tuple(self.slice[1])) + out = self.dense0(x_slice) + return out + + x = mx.nd.random.uniform(shape=(16, 128, 300, 300)) + slice = [[0, 64, 50, 0], [8, 128, 300, 300]] + net = Net(slice) + check_layer_forward_withinput(net, x) + +@with_seed() +def test_slice_dense_slice_dense(): + class Net(gluon.HybridBlock): + def __init__(self, slice, **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + channel0 = 50 + channel1 = np.random.randint(1, 129) + self.dense0 = nn.Dense(channel0) + self.dense1 = nn.Dense(channel1) + self.slice = slice + + def hybrid_forward(self, F, x): + x_slice = x.slice(begin=tuple(self.slice[0]), end=tuple(self.slice[1])) + y = self.dense0(x_slice) + y_slice = y.slice(begin=(4, 0), end=(-1, 10)) + out = self.dense1(y_slice) + return out + + x = mx.nd.random.uniform(shape=(16, 128, 300, 300)) + slice = [[0, 64, 50, 0], [8, 128, 300, 300]] + net = Net(slice) + check_layer_forward_withinput(net, x) + +@with_seed() +def test_reshape_dense_reshape_dense(): + class 
Net(gluon.HybridBlock): + def __init__(self, **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + channel0 = np.random.randint(1, 129) + channel1 = np.random.randint(1, 129) + self.dense0 = nn.Dense(channel0) + self.dense1 = nn.Dense(channel1) + + def hybrid_forward(self, F, x): + x_reshape = x.reshape((8, 64, 600, -1)) + y = self.dense0(x_reshape) + y_reshape = y.reshape((1, -1)) + out = self.dense1(y_reshape) + return out + + x = mx.nd.random.uniform(shape=(16, 128, 300, 300)) + net = Net() + check_layer_forward_withinput(net, x) + + +@with_seed() +def test_slice_dense_reshape_dense(): + class Net(gluon.HybridBlock): + def __init__(self, slice, **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + channel0 = np.random.randint(1, 129) + channel1 = np.random.randint(1, 129) + self.dense0 = nn.Dense(channel0) + self.dense1 = nn.Dense(channel1) + self.slice = slice + + def hybrid_forward(self, F, x): + x_slice = x.slice(begin=tuple(self.slice[0]), end=tuple(self.slice[1])) + y = self.dense0(x_slice) + y_reshape = y.reshape((1, -1)) + out = self.dense1(y_reshape) + return out + + x = mx.nd.random.uniform(shape=(16, 128, 300, 300)) + slice = [[0, 64, 50, 0], [8, 128, 300, 300]] + net = Net(slice) + check_layer_forward_withinput(net, x) + + +@with_seed() +def test_reshape_dense_slice_dense(): + class Net(gluon.HybridBlock): + def __init__(self, **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + channel0 = 128 + channel1 = np.random.randint(1, 129) + self.dense0 = nn.Dense(channel0) + self.dense1 = nn.Dense(channel1) + + def hybrid_forward(self, F, x): + x_reshape = x.reshape((8, 64, 600, -1)) + y = self.dense0(x_reshape) + y_slice = y.slice(begin=(0, 64), end=(8, 128)) + out = self.dense1(y_slice) + return out + + x = mx.nd.random.uniform(shape=(16, 128, 300, 300)) + net = Net() + check_layer_forward_withinput(net, x) + + +@with_seed() +@unittest.skip('skippping temporarily, tracked by MXNET-519') +def test_reshape_batchnorm(): + class Net(gluon.HybridBlock): + def __init__(self, shape, **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + self.conv0 = nn.Conv2D(128, (1, 1)) + self.bn0 = nn.BatchNorm() + self.reshape = shape + + def hybrid_forward(self, F, x): + x_in = self.conv0(x) + x_reshape = x_in.reshape(self.reshape) + out = self.bn0(x_reshape) + return out + + x = mx.nd.random.uniform(shape=(16, 128, 256, 256)) + shape = (32, 512, 128, -1) + net = Net(shape) + check_layer_forward_withinput(net, x) + + +@with_seed() +def test_slice_batchnorm(): + class Net(gluon.HybridBlock): + def __init__(self, slice, **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + self.conv0 = nn.Conv2D(128, (1, 1)) + self.bn0 = nn.BatchNorm(3) + self.slice = slice + + def hybrid_forward(self, F, x): + x_in = self.conv0(x) + x_slice = x_in.slice(begin=tuple(self.slice[0]), + end=tuple(self.slice[1])) + out = self.bn0(x_slice) + return out + + x = mx.nd.random.uniform(shape=(16, 128, 256, 256)) + slice = [[0, 64, 50, 0], [8, 128, 256, 256]] + net = Net(slice) + check_layer_forward_withinput(net, x) + + +@with_seed() +def test_slice_batchnorm_slice_batchnorm(): + class Net(gluon.HybridBlock): + def __init__(self, slice, **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + self.conv0 = nn.Conv2D(128, (1, 1)) + self.bn0 = nn.BatchNorm(3) + self.bn1 = nn.BatchNorm(1) + self.slice = slice + + def hybrid_forward(self, F, x): + x_in = self.conv0(x) + x_slice = 
x_in.slice(begin=tuple(self.slice[0][0]), end=tuple(self.slice[0][1])) + y = self.bn0(x_slice) + y_slice = y.slice(begin=tuple(self.slice[1][0]), end=tuple(self.slice[1][1])) + out = self.bn1(y_slice) + return out + + x = mx.nd.random.uniform(shape=(16, 128, 256, 256)) + slice = [[[0, 64, 50, 0], [8, 128, 200, 256]], [[4, 50, 0, 128], [7, -1, -1, -1]]] + net = Net(slice) + check_layer_forward_withinput(net, x) + + +@with_seed() +def test_reshape_batchnorm_reshape_batchnorm(): + class Net(gluon.HybridBlock): + def __init__(self, shape, **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + self.conv0 = nn.Conv2D(128, (1, 1)) + self.bn0 = nn.BatchNorm(0) + self.bn1 = nn.BatchNorm(2) + self.reshape = shape + + def hybrid_forward(self, F, x): + x_in = self.conv0(x) + x_reshape = x_in.reshape(self.reshape[0]) + y = self.bn0(x_reshape) + y_reshape = y.reshape(self.reshape[1]) + out = self.bn1(y_reshape) + return out + + x = mx.nd.random.uniform(shape=(16, 128, 256, 512)) + shape = [(8, 256, 128, -1), (32, 128, 512, -1)] + net = Net(shape) + check_layer_forward_withinput(net, x) + + +@with_seed() +def test_slice_batchnorm_reshape_batchnorm(): + class Net(gluon.HybridBlock): + def __init__(self, shape, slice, **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + self.conv0 = nn.Conv2D(128, (1, 1)) + self.bn0 = nn.BatchNorm(0) + self.bn1 = nn.BatchNorm(2) + self.reshape = shape + self.slice = slice + + def hybrid_forward(self, F, x): + x_in = self.conv0(x) + x_slice = x_in.slice(begin=tuple(self.slice[0]), end=tuple(self.slice[1])) + y = self.bn0(x_slice) + y_reshape = y.reshape(self.reshape) + out = self.bn1(y_reshape) + return out + + x = mx.nd.random.uniform(shape=(16, 128, 256, 256)) + slice = [[0, 64, 50, 0], [8, 128, 200, 256]] + shape = (1, 128, 256, -1) + net = Net(shape, slice) + check_layer_forward_withinput(net, x) + + +@with_seed() +def test_reshape_batchnorm_slice_batchnorm(): + class Net(gluon.HybridBlock): + def __init__(self, shape, slice, **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + self.conv0 = nn.Conv2D(128, (1, 1)) + self.bn0 = nn.BatchNorm(2) + self.bn1 = nn.BatchNorm(0) + self.reshape = shape + self.slice = slice + + def hybrid_forward(self, F, x): + x_in = self.conv0(x) + x_reshape = x_in.reshape(self.reshape) + y = self.bn0(x_reshape) + y_slice = y.slice(begin=tuple(self.slice[0]), end=tuple(self.slice[1])) + out = self.bn1(y_slice) + return out + + x = mx.nd.random.uniform(shape=(16, 128, 256, 256)) + slice = [[0, 0, 50, 0], [8, 1, -1, 100]] + shape = (128, 1, 256, -1) + net = Net(shape, slice) + check_layer_forward_withinput(net, x) + +@with_seed() +def test_reshape_pooling2d(): + max_pooling = nn.MaxPool2D(strides=(2, 3), padding=(1, 1)) + avg_pooling = nn.AvgPool2D(strides=(2, 2), padding=(1, 1)) + global_maxpooling = nn.GlobalMaxPool2D() + global_avgpooling = nn.GlobalAvgPool2D() + pooling_layers = [max_pooling, avg_pooling, global_maxpooling, global_avgpooling] + class Net(gluon.HybridBlock): + def __init__(self, + shape, + pooling_layer, + **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + self.reshape = shape + self.pool0 = pooling_layer + + def hybrid_forward(self, F, x): + x_reshape = x.reshape(self.reshape) + out = self.pool0(x_reshape) + return out + + x = mx.nd.random.uniform(shape=(16, 128, 256, 256)) + shape = (128, 256, 256, -1) + for i in range(len(pooling_layers)): + net = Net(shape, pooling_layers[i]) + check_layer_forward_withinput(net, x) + 
+@with_seed() +def test_slice_pooling2d(): + max_pooling = nn.MaxPool2D(strides=(2, 3), padding=(1, 1)) + avg_pooling = nn.AvgPool2D(strides=(2, 2), padding=(1, 1)) + global_maxpooling = nn.GlobalMaxPool2D() + global_avgpooling = nn.GlobalAvgPool2D() + pooling_layers = [max_pooling, avg_pooling, global_maxpooling, global_avgpooling] + class Net(gluon.HybridBlock): + def __init__(self, + slice, + pooling_layer, + **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + self.slice = slice + self.pool0 = pooling_layer + + def hybrid_forward(self, F, x): + x_slice = x.slice(begin=self.slice[0], end=self.slice[1]) + out = self.pool0(x_slice) + return out + + x = mx.nd.random.uniform(shape=(16, 128, 256, 256)) + slice = [(12, 0, 128, 64), (16, 16, 256, 256)] + for i in range(len(pooling_layers)): + net = Net(slice, pooling_layers[i]) + check_layer_forward_withinput(net, x) + +@with_seed() +def test_reshape_pooling2d_reshape_pooling2d(): + max_pooling = nn.MaxPool2D(strides=(2, 2), padding=(1, 1)) + avg_pooling = nn.AvgPool2D(strides=(2, 2), padding=(1, 1)) + global_maxpooling = nn.GlobalMaxPool2D() + global_avgpooling = nn.GlobalAvgPool2D() + pooling_layers = [max_pooling, avg_pooling, global_maxpooling, global_avgpooling] + class Net(gluon.HybridBlock): + def __init__(self, + shape, + pooling_layer1, + pooling_layer2, + **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + self.reshape = shape + self.pool0 = pooling_layer1 + self.pool1 = pooling_layer2 + + def hybrid_forward(self, F, x): + x_reshape = x.reshape(self.reshape[0]) + y = self.pool0(x_reshape) + y_reshape = y.reshape(self.reshape[1]) + out = self.pool1(y_reshape) + return out + + x = mx.nd.random.uniform(shape=(16, 128, 256, 256)) + shape = [(128, 256, 64, -1), (128, 256, 11, -1)] + for i in range(len(pooling_layers)): + for j in range(len(pooling_layers)): + if isinstance(pooling_layers[i], (nn.GlobalMaxPool2D, nn.GlobalAvgPool2D)): + shape[1] = (256, 128, 1, 1) + net = Net(shape, pooling_layers[i], pooling_layers[j]) + check_layer_forward_withinput(net, x) + +@with_seed() +def test_slice_pooling2d_slice_pooling2d(): + max_pooling = nn.MaxPool2D(strides=(2, 3), padding=(1, 1)) + avg_pooling = nn.AvgPool2D(strides=(2, 2), padding=(1, 1)) + global_maxpooling = nn.GlobalMaxPool2D() + global_avgpooling = nn.GlobalAvgPool2D() + pooling_layers = [max_pooling, avg_pooling, global_maxpooling, global_avgpooling] + class Net(gluon.HybridBlock): + def __init__(self, + slice, + pooling_layer1, + pooling_layer2, + **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + self.slice = slice + self.pool0 = pooling_layer1 + self.pool1 = pooling_layer2 + + def hybrid_forward(self, F, x): + x_slice = x.slice(begin=self.slice[0][0], end=self.slice[0][1]) + y = self.pool0(x_slice) + y_slice = y.slice(begin=self.slice[1][0], end=self.slice[1][1]) + out = self.pool1(y_slice) + return out + + x = mx.nd.random.uniform(shape=(16, 128, 256, 256)) + slice = [[(8, 0, 100, 50), (16, -1, -1, -1)], [(0, 64, 0, 50), (2, -1, -1, -1)]] + for i in range(len(pooling_layers)): + for j in range(len(pooling_layers)): + if isinstance(pooling_layers[i], (nn.GlobalMaxPool2D, nn.GlobalAvgPool2D)): + slice[1] = [(0, 64, 0, 0), (2, -1, 1, 1)] + net = Net(slice, pooling_layers[i], pooling_layers[j]) + check_layer_forward_withinput(net, x) + +@with_seed() +def test_slice_pooling2d_reshape_pooling2d(): + max_pooling = nn.MaxPool2D(strides=(2, 3), padding=(1, 1)) + avg_pooling = nn.AvgPool2D(strides=(2, 2), padding=(1, 
1)) + global_maxpooling = nn.GlobalMaxPool2D() + global_avgpooling = nn.GlobalAvgPool2D() + pooling_layers = [max_pooling, avg_pooling, global_maxpooling, global_avgpooling] + class Net(gluon.HybridBlock): + def __init__(self, + shape, + slice, + pooling_layer1, + pooling_layer2, + **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + self.reshape = shape + self.slice = slice + self.pool0 = pooling_layer1 + self.pool1 = pooling_layer2 + + def hybrid_forward(self, F, x): + x_slice = x.slice(begin=self.slice[0], end=self.slice[1]) + y = self.pool0(x_slice) + y_reshape = y.reshape(self.reshape) + out = self.pool1(y_reshape) + return out + + x = mx.nd.random.uniform(shape=(16, 128, 256, 256)) + slice = [(8, 0, 100, 50), (16, 128, 256, 256)] + shape = (32, -1, 0, 0) + for i in range(len(pooling_layers)): + for j in range(len(pooling_layers)): + net = Net(shape, slice, pooling_layers[i], pooling_layers[j]) + check_layer_forward_withinput(net, x) + +@with_seed() +def test_reshape_pooling2d_slice_pooling2d(): + max_pooling = nn.MaxPool2D(strides=(2, 3), padding=(1, 1)) + avg_pooling = nn.AvgPool2D(strides=(2, 2), padding=(1, 1)) + global_maxpooling = nn.GlobalMaxPool2D() + global_avgpooling = nn.GlobalAvgPool2D() + pooling_layers = [max_pooling, avg_pooling, global_maxpooling, global_avgpooling] + class Net(gluon.HybridBlock): + def __init__(self, + shape, + slice, + pooling_layer1, + pooling_layer2, + **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + self.reshape = shape + self.slice = slice + self.pool0 = pooling_layer1 + self.pool1 = pooling_layer2 + + def hybrid_forward(self, F, x): + x_reshape = x.reshape(self.reshape) + y = self.pool0(x_reshape) + y_slice = y.slice(begin=self.slice[0], end=self.slice[1]) + out = self.pool1(y_slice) + return out + + x = mx.nd.random.uniform(shape=(16, 128, 256, 256)) + shape = (0, 512, 64, -1) + slice = [(8, 256, 10, 20), (-1, -1, -1, 70)] + for i in range(len(pooling_layers)): + for j in range(len(pooling_layers)): + if isinstance(pooling_layers[i], (nn.GlobalMaxPool2D, nn.GlobalAvgPool2D)): + slice = [(8, 256, 0, 0), (-1, -1, 1, 1)] + net = Net(shape, slice, pooling_layers[i], pooling_layers[j]) + check_layer_forward_withinput(net, x) + +@with_seed() +@unittest.skip('skippping temporarily, tracked by MXNET-519') +def test_reshape_deconv(): + class Net(gluon.HybridBlock): + def __init__(self, shape, **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + self.reshape = shape + self.conv0 = nn.Conv2DTranspose(64, (3, 3)) + + def hybrid_forward(self, F, x): + x_reshape = x.reshape(self.reshape) + out = self.conv0(x_reshape) + return out + x = mx.nd.random.uniform(shape=(64, 2, 256, 256)) + shape = (8, 16, 64, -1) + net = Net(shape) + check_layer_forward_withinput(net, x) + +@with_seed() +@unittest.skip('skippping temporarily, tracked by MXNET-519') +def test_slice_deconv(): + class Net(gluon.HybridBlock): + def __init__(self, slice, **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + self.slice = slice + self.conv0 = nn.Conv2DTranspose(64, (3, 3)) + + def hybrid_forward(self, F, x): + x_slice = x.slice(begin=self.slice[0], end=self.slice[1]) + out = self.conv0(x_slice) + return out + x = mx.nd.random.uniform(shape=(128, 32, 500, 500)) + slice = [(0, 16, 0, 0), (1, 32, 256, 256)] + net = Net(slice) + check_layer_forward_withinput(net, x) + +@with_seed() +@unittest.skip('skippping temporarily, tracked by MXNET-519') +def test_reshape_deconv_reshape_deconv(): + class 
Net(gluon.HybridBlock): + def __init__(self, shape, **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + self.reshape = shape + self.conv0 = nn.Conv2DTranspose(64, (3, 3)) + self.conv1 = nn.Conv2DTranspose(128, (2, 3), strides=(2, 2)) + + def hybrid_forward(self, F, x): + x_reshape = x.reshape(self.reshape[0]) + y = self.conv0(x_reshape) + y_reshape = y.reshape(self.reshape[1]) + out = self.conv1(y_reshape) + return out + x = mx.nd.random.uniform(shape=(16, 32, 256, 512)) + shape = [(32, 0, 256, -1), (64, 32, 129, -1)] + net = Net(shape) + check_layer_forward_withinput(net, x) + +@with_seed() +@unittest.skip('skippping temporarily, tracked by MXNET-519') +def test_slice_deconv_slice_deconv(): + class Net(gluon.HybridBlock): + def __init__(self, slice, **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + self.slice = slice + self.conv0 = nn.Conv2DTranspose(64, (3, 3)) + self.conv1 = nn.Conv2DTranspose(128, (2, 3), strides=(2, 2)) + + def hybrid_forward(self, F, x): + x_slice = x.slice(begin=self.slice[0][0], end=self.slice[0][1]) + y = self.conv0(x_slice) + y_slice = y.slice(begin=self.slice[1][0], end=self.slice[1][1]) + out = self.conv1(y_slice) + return out + x = mx.nd.random.uniform(shape=(128, 32, 500, 500)) + slice = [[(0, 16, 0, 0), (8, 32, 128, 128)], [(4, 0, 2, 0), (8, 32, 130, 128)]] + net = Net(slice) + check_layer_forward_withinput(net, x) + +@with_seed() +@unittest.skip('skippping temporarily, tracked by MXNET-519') +def test_reshape_deconv_slice_deconv(): + class Net(gluon.HybridBlock): + def __init__(self, shape, slice, **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + self.reshape = shape + self.slice = slice + self.conv0 = nn.Conv2DTranspose(64, (3, 3)) + self.conv1 = nn.Conv2DTranspose(128, (2, 3), strides=(2, 2)) + + def hybrid_forward(self, F, x): + x_reshape = x.reshape(self.reshape) + y = self.conv0(x_reshape) + y_slice = y.slice(begin=self.slice[0], end=self.slice[1]) + out = self.conv1(y_slice) + return out + x = mx.nd.random.uniform(shape=(16, 4, 500, 500)) + shape = (32, 16, 125, -1) + slice = [(4, 32, 0, 0), (20, 64, 64, 224)] + net = Net(shape, slice) + check_layer_forward_withinput(net, x) + +@with_seed() +@unittest.skip('skippping temporarily, tracked by MXNET-519') +def test_slice_deconv_reshape_deconv(): + class Net(gluon.HybridBlock): + def __init__(self, shape, slice, **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + self.reshape = shape + self.slice = slice + self.conv0 = nn.Conv2DTranspose(64, (3, 3)) + self.conv1 = nn.Conv2DTranspose(128, (2, 3), strides=(2, 2)) + + def hybrid_forward(self, F, x): + x_slice = x.slice(begin=self.slice[0], end=self.slice[1]) + y = self.conv0(x_slice) + y_reshape = y.reshape(self.reshape) + out = self.conv1(y_reshape) + return out + x = mx.nd.random.uniform(shape=(16, 32, 256, 512)) + shape = (24, 16, 452, -1) + slice = [(4, 0, 0, 0), (16, 32, 224, 224)] + net = Net(shape, slice) + check_layer_forward_withinput(net, x) + +@with_seed() +def test_reshape_activation(): + class Net(gluon.HybridBlock): + def __init__(self, act, shape, **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + self.reshape = shape + self.act = nn.Activation(act) + + def hybrid_forward(self, F, x): + x_reshape = x.reshape(self.reshape) + out = self.act(x_reshape) + return out + acts = ["relu", "sigmoid", "tanh", "softrelu"] + for act in acts: + x = mx.nd.random.uniform(-1, 1, shape=(16, 32, 256, 512)) + shape = (64, 8, 128, -1) + 
net = Net(act, shape) + check_layer_forward_withinput(net, x) + + +@with_seed() +def test_slice_activation(): + class Net(gluon.HybridBlock): + def __init__(self, act, slice, **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + self.slice = slice + self.act = nn.Activation(act) + + def hybrid_forward(self, F, x): + x_slice = x.slice(begin=self.slice[0], end=self.slice[1]) + out = self.act(x_slice) + return out + + acts = ["relu", "sigmoid", "tanh", "softrelu"] + for act in acts: + x = mx.nd.random.uniform(-1, 1, shape=(16, 32, 256, 512)) + slice = [(8, 16, 0, 0), (16, 32, 100, 100)] + net = Net(act, slice) + check_layer_forward_withinput(net, x) + + +@with_seed() +def test_reshape_activation_reshape_activation(): + class Net(gluon.HybridBlock): + def __init__(self, act0, act1, shape, **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + self.reshape = shape + self.act0 = nn.Activation(act0) + self.act1 = nn.Activation(act1) + + def hybrid_forward(self, F, x): + x_reshape = x.reshape(self.reshape[0]) + y = self.act0(x_reshape) + y_reshape = y.reshape(self.reshape[1]) + out = self.act1(y_reshape) + return out + acts = ["relu", "sigmoid", "tanh", "softrelu"] + for idx0, act0 in enumerate(acts): + for idx1, act1 in enumerate(acts): + if idx1 == idx0: + continue + x = mx.nd.random.uniform(-1, 1, shape=(16, 32, 256, 512)) + shape = [(64, 8, 128, -1), (16, 64, 128, -1)] + net = Net(act0, act1, shape) + check_layer_forward_withinput(net, x) + + +@with_seed() +def test_slice_activation_slice_activation(): + class Net(gluon.HybridBlock): + def __init__(self, act0, act1, slice, **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + self.slice = slice + self.act0 = nn.Activation(act0) + self.act1 = nn.Activation(act1) + + def hybrid_forward(self, F, x): + x_slice = x.slice(begin=self.slice[0][0], end=self.slice[0][1]) + y = self.act0(x_slice) + y_slice = y.slice(begin=self.slice[1][0], end=self.slice[1][1]) + out = self.act1(y_slice) + return out + acts = ["relu", "sigmoid", "tanh", "softrelu"] + for idx0, act0 in enumerate(acts): + for idx1, act1 in enumerate(acts): + if idx1 == idx0: + continue + x = mx.nd.random.uniform(-1, 1, shape=(16, 32, 256, 512)) + slice = [[(0, 0, 100, 100), (8, 16, 256, 512)], [(2, 4, 0, 0), (8, 10, 128, 128)]] + net = Net(act0, act1, slice) + check_layer_forward_withinput(net, x) + + +@with_seed() +def test_reshape_activation_slice_activation(): + class Net(gluon.HybridBlock): + def __init__(self, act0, act1, shape, slice, **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + self.reshape = shape + self.slice = slice + self.act0 = nn.Activation(act0) + self.act1 = nn.Activation(act1) + + def hybrid_forward(self, F, x): + x_reshape = x.reshape(self.reshape) + y = self.act0(x_reshape) + y_slice = y.slice(begin=self.slice[0], end=self.slice[1]) + out = self.act1(y_slice) + return out + acts = ["relu", "sigmoid", "tanh", "softrelu"] + for idx0, act0 in enumerate(acts): + for idx1, act1 in enumerate(acts): + if idx1 == idx0: + continue + x = mx.nd.random.uniform(-1, 1, shape=(16, 32, 256, 512)) + shape = (64, 16, 128, -1) + slice = [(0, 0, 0, 100), (8, 16, 64, 228)] + net = Net(act0, act1, shape, slice) + check_layer_forward_withinput(net, x) + + +@with_seed() +def test_slice_activation_reshape_activation(): + class Net(gluon.HybridBlock): + def __init__(self, act0, act1, shape, slice, **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + self.reshape = shape + 
self.slice = slice + self.act0 = nn.Activation(act0) + self.act1 = nn.Activation(act1) + + def hybrid_forward(self, F, x): + x_slice = x.slice(begin=self.slice[0], end=self.slice[1]) + y = self.act0(x_slice) + y_reshape = y.reshape(self.reshape) + out = self.act1(y_reshape) + return out + acts = ["relu", "sigmoid", "tanh", "softrelu"] + for idx0, act0 in enumerate(acts): + for idx1, act1 in enumerate(acts): + if idx1 == idx0: + continue + x = mx.nd.random.uniform(-1, 1, shape=(16, 32, 256, 512)) + slice = [(0, 0, 0, 100), (8, 16, 64, 228)] + shape = (64, 16, 64, -1) + net = Net(act0, act1, shape, slice) + check_layer_forward_withinput(net, x) if __name__ == '__main__': import nose From 41d4261288e6b96d95d3e5733088527442cd3ea5 Mon Sep 17 00:00:00 2001 From: Shufan Date: Wed, 6 Jun 2018 15:06:57 +0800 Subject: [PATCH 02/12] Referring the issue link to skip message --- tests/python/unittest/test_gluon.py | 25 +++++++++++++------------ 1 file changed, 13 insertions(+), 12 deletions(-) diff --git a/tests/python/unittest/test_gluon.py b/tests/python/unittest/test_gluon.py index 57ef72985a66..16e3d3af7055 100644 --- a/tests/python/unittest/test_gluon.py +++ b/tests/python/unittest/test_gluon.py @@ -1281,7 +1281,7 @@ def hybrid_forward(self, F, x): @with_seed() def test_group_conv2d_16c(): grp_list = [16, 32, 64, 128, 256, 512, 1024] - input_size_list = np.random.randint(low=1, high=225, size=10).tolist() + input_size_list = np.random.randint(low=3, high=225, size=10).tolist() kernel_list = [1, 3] batch_size = 32 class Net(gluon.HybridBlock): @@ -1308,7 +1308,7 @@ def hybrid_forward(self, F, x): @with_seed() -@unittest.skip('skippping temporarily, tracked by MXNET-519') +@unittest.skip('skippping temporarily, tracked by https://github.com/apache/incubator-mxnet/issues/11164') def test_deconv2d_16c(): in_chn_list = [1024, 512, 256, 128, 64, 32, 16] out_chn_list = [512, 256, 128, 64, 32, 16, 3] @@ -1332,7 +1332,7 @@ def hybrid_forward(self, F, x): @with_seed() -@unittest.skip('skippping temporarily, tracked by MXNET-519') +@unittest.skip('skippping temporarily, tracked by https://github.com/apache/incubator-mxnet/issues/11164') def test_batchnorm_16c(): chn_list = [16, 32, 64, 128, 256, 512, 1024] shape = np.random.randint(low=1, high=300, size=10) @@ -1416,7 +1416,7 @@ def hybrid_forward(self, F, x): @with_seed() -@unittest.skip('skippping temporarily, tracked by MXNET-519') +@unittest.skip('skippping temporarily, tracked by https://github.com/apache/incubator-mxnet/issues/11164') def test_reshape_conv_reshape_conv(): class Net(gluon.HybridBlock): def __init__(self, **kwargs): @@ -1474,7 +1474,7 @@ def hybrid_forward(self, F, x): @with_seed() -@unittest.skip('skippping temporarily, tracked by MXNET-519') +@unittest.skip('skippping temporarily, tracked by https://github.com/apache/incubator-mxnet/issues/11164') def test_slice_conv_reshape_conv(): class Net(gluon.HybridBlock): def __init__(self, **kwargs): @@ -1630,6 +1630,7 @@ def hybrid_forward(self, F, x): @with_seed() +@unittest.skip('skippping temporarily, tracked by https://github.com/apache/incubator-mxnet/issues/11164') def test_reshape_dense_slice_dense(): class Net(gluon.HybridBlock): def __init__(self, **kwargs): @@ -1653,7 +1654,7 @@ def hybrid_forward(self, F, x): @with_seed() -@unittest.skip('skippping temporarily, tracked by MXNET-519') +@unittest.skip('skippping temporarily, tracked by https://github.com/apache/incubator-mxnet/issues/11164') def test_reshape_batchnorm(): class Net(gluon.HybridBlock): def __init__(self, shape, 
**kwargs): @@ -2002,7 +2003,7 @@ def hybrid_forward(self, F, x): check_layer_forward_withinput(net, x) @with_seed() -@unittest.skip('skippping temporarily, tracked by MXNET-519') +@unittest.skip('skippping temporarily, tracked by https://github.com/apache/incubator-mxnet/issues/11164') def test_reshape_deconv(): class Net(gluon.HybridBlock): def __init__(self, shape, **kwargs): @@ -2021,7 +2022,7 @@ def hybrid_forward(self, F, x): check_layer_forward_withinput(net, x) @with_seed() -@unittest.skip('skippping temporarily, tracked by MXNET-519') +@unittest.skip('skippping temporarily, tracked by https://github.com/apache/incubator-mxnet/issues/11164') def test_slice_deconv(): class Net(gluon.HybridBlock): def __init__(self, slice, **kwargs): @@ -2040,7 +2041,7 @@ def hybrid_forward(self, F, x): check_layer_forward_withinput(net, x) @with_seed() -@unittest.skip('skippping temporarily, tracked by MXNET-519') +@unittest.skip('skippping temporarily, tracked by https://github.com/apache/incubator-mxnet/issues/11164') def test_reshape_deconv_reshape_deconv(): class Net(gluon.HybridBlock): def __init__(self, shape, **kwargs): @@ -2062,7 +2063,7 @@ def hybrid_forward(self, F, x): check_layer_forward_withinput(net, x) @with_seed() -@unittest.skip('skippping temporarily, tracked by MXNET-519') +@unittest.skip('skippping temporarily, tracked by https://github.com/apache/incubator-mxnet/issues/11164') def test_slice_deconv_slice_deconv(): class Net(gluon.HybridBlock): def __init__(self, slice, **kwargs): @@ -2084,7 +2085,7 @@ def hybrid_forward(self, F, x): check_layer_forward_withinput(net, x) @with_seed() -@unittest.skip('skippping temporarily, tracked by MXNET-519') +@unittest.skip('skippping temporarily, tracked by https://github.com/apache/incubator-mxnet/issues/11164') def test_reshape_deconv_slice_deconv(): class Net(gluon.HybridBlock): def __init__(self, shape, slice, **kwargs): @@ -2108,7 +2109,7 @@ def hybrid_forward(self, F, x): check_layer_forward_withinput(net, x) @with_seed() -@unittest.skip('skippping temporarily, tracked by MXNET-519') +@unittest.skip('skippping temporarily, tracked by https://github.com/apache/incubator-mxnet/issues/11164') def test_slice_deconv_reshape_deconv(): class Net(gluon.HybridBlock): def __init__(self, shape, slice, **kwargs): From 103d16fc78132e1db67d92fda87db1dc34feb857 Mon Sep 17 00:00:00 2001 From: Shufan Date: Mon, 11 Jun 2018 18:47:04 +0800 Subject: [PATCH 03/12] Retrigger the PRECI --- tests/python/unittest/test_gluon.py | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/python/unittest/test_gluon.py b/tests/python/unittest/test_gluon.py index 16e3d3af7055..5501b238f159 100644 --- a/tests/python/unittest/test_gluon.py +++ b/tests/python/unittest/test_gluon.py @@ -1397,7 +1397,6 @@ def hybrid_forward(self, F, x): net = Net(axis, input_num, chn_list[i], 1) check_layer_forward_withinput(net, x) - @with_seed() def test_reshape_conv(): class Net(gluon.HybridBlock): From 13921d9e0cbb22199fc2c7b6c4078c07e784112d Mon Sep 17 00:00:00 2001 From: Wu Date: Fri, 6 Jul 2018 10:42:00 +0800 Subject: [PATCH 04/12] Remove previous changes --- tests/python/unittest/test_gluon.py | 1048 --------------------------- 1 file changed, 1048 deletions(-) diff --git a/tests/python/unittest/test_gluon.py b/tests/python/unittest/test_gluon.py index 5501b238f159..f5f3d963af3d 100644 --- a/tests/python/unittest/test_gluon.py +++ b/tests/python/unittest/test_gluon.py @@ -1237,1054 +1237,6 @@ def test_summary(): net.hybridize() assert_raises(AssertionError, net.summary, 
mx.nd.ones((32, 3, 224, 224))) -def check_layer_forward_withinput(net, x): - x_hybrid = x.copy() - x.attach_grad() - x_hybrid.attach_grad() - net.collect_params().initialize() - with mx.autograd.record(): - out1 = net(x) - out1.backward() - net.hybridize() - with mx.autograd.record(): - out2 = net(x_hybrid) - out2.backward() - mx.test_utils.assert_almost_equal(x.grad.asnumpy(), x_hybrid.grad.asnumpy(), rtol=1e-5, atol=1e-6) - mx.test_utils.assert_almost_equal(out1.asnumpy(), out2.asnumpy(), rtol=1e-5, atol=1e-6) - -@with_seed() -def test_conv2d_16c(): - chn_list = [16, 32, 64, 128, 256, 512, 1024] - kernel_list = [1, 3, 5, 7, 11] - kernel_list.append(224) - batch_size = 32 - class Net(gluon.HybridBlock): - def __init__(self, - chn_num, - kernel, - **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - self.conv0 = gluon.nn.Conv2D(chn_num, (kernel, kernel)) - - def hybrid_forward(self, F, x): - out = self.conv0(x) - return out - - x = mx.nd.random.uniform(-1.0, 1.0, shape=(batch_size, 3, 224, 224)) - for i in range(len(chn_list)): - for j in range(len(kernel_list)): - net = Net(chn_list[i], kernel_list[j]) - check_layer_forward_withinput(net, x) - - -@with_seed() -def test_group_conv2d_16c(): - grp_list = [16, 32, 64, 128, 256, 512, 1024] - input_size_list = np.random.randint(low=3, high=225, size=10).tolist() - kernel_list = [1, 3] - batch_size = 32 - class Net(gluon.HybridBlock): - def __init__(self, - chn_num, - kernel, - **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - self.conv0 = gluon.nn.Conv2D(chn_num, (1, 1)) - self.conv1 = gluon.nn.Conv2D(chn_num, (kernel, kernel), groups=chn_num) - - def hybrid_forward(self, F, x): - y = self.conv0(x) - out = self.conv1(y) - return out - - for i in range(len(input_size_list)): - x = mx.nd.random.uniform(-1.0, 1.0, shape=(batch_size, 3, input_size_list[i], input_size_list[i])) - for j in range(len(grp_list)): - for k in range(len(kernel_list)): - net = Net(grp_list[j], kernel_list[k]) - check_layer_forward_withinput(net, x) - - -@with_seed() -@unittest.skip('skippping temporarily, tracked by https://github.com/apache/incubator-mxnet/issues/11164') -def test_deconv2d_16c(): - in_chn_list = [1024, 512, 256, 128, 64, 32, 16] - out_chn_list = [512, 256, 128, 64, 32, 16, 3] - kernel_list = [1, 3, 5, 7] - in_shape = [4, 8, 16, 32, 64, 224] - batch_size = 32 - class Net(gluon.HybridBlock): - def __init__(self, chn_num, kernel, **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - self.deconv0 = gluon.nn.Conv2DTranspose(chn_num, (kernel, kernel)) - - def hybrid_forward(self, F, x): - out = self.deconv0(x) - return out - for i in range(len(in_shape)): - x = mx.nd.random.uniform(-1.0, 1.0, shape=(batch_size, in_chn_list[i], in_shape[i], in_shape[i])) - for j in range(len(kernel_list)): - net = Net(out_chn_list[i], kernel_list[j]) - check_layer_forward_withinput(net, x) - - -@with_seed() -@unittest.skip('skippping temporarily, tracked by https://github.com/apache/incubator-mxnet/issues/11164') -def test_batchnorm_16c(): - chn_list = [16, 32, 64, 128, 256, 512, 1024] - shape = np.random.randint(low=1, high=300, size=10) - shape_list = [] - for i in range(len(shape)): - shape_list.append((shape[i], shape[i])) - batch_size = 32 - class Net(gluon.HybridBlock): - def __init__(self, - chn_num, - kernel, - axis, - **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - self.conv0 = gluon.nn.Conv2D(chn_num, (kernel, kernel)) - self.bn0 = gluon.nn.BatchNorm(axis=axis) - - 
def hybrid_forward(self, F, x): - conv = self.conv0(x) - out = self.bn0(conv) - return out - - for i in range(len(chn_list)): - for j in range(len(shape_list)): - shape = (batch_size, ) + (3,) + shape_list[j] - x = mx.nd.random.uniform(-1.0, 1.0, shape=shape) - net = Net(chn_list[i], 1, 1) - check_layer_forward_withinput(net, x) - - -@with_seed() -def test_concat(): - chn_list = [16, 32, 64, 128, 256, 512, 1024] - shapes = [224, 64, 32, 27, 16, 7, 3] - input_num = np.random.randint(low=2, high=11) - shape_list = [] - for i in range(len(shapes)): - shape_list.append((shapes[i], shapes[i])) - batch_size = 32 - class Net(gluon.HybridBlock): - def __init__(self, - check_dim, - input_num, - chn_num, - kernel, - **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - from mxnet.gluon.contrib.nn import HybridConcurrent - self.concat = HybridConcurrent(axis=check_dim) - for i in range(input_num): - self.concat.add(gluon.nn.Conv2D(chn_num, (kernel, kernel))) - - def hybrid_forward(self, F, x): - return self.concat(x) - - for i in range(len(chn_list)): - shape = (batch_size,) + (3,) + shape_list[i] - x = mx.nd.random.uniform(-1.0, 1.0, shape=shape) - for axis in range(4): - net = Net(axis, input_num, chn_list[i], 1) - check_layer_forward_withinput(net, x) - -@with_seed() -def test_reshape_conv(): - class Net(gluon.HybridBlock): - def __init__(self, **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - self.conv0 = nn.Conv2D(64, (3, 3)) - - def hybrid_forward(self, F, x): - x_reshape = x.reshape((0, 0, 448, 112)) - out = self.conv0(x_reshape) - return out - x = mx.nd.random.uniform(shape=(32, 3, 224, 224)) - net = Net() - check_layer_forward_withinput(net, x) - - -@with_seed() -@unittest.skip('skippping temporarily, tracked by https://github.com/apache/incubator-mxnet/issues/11164') -def test_reshape_conv_reshape_conv(): - class Net(gluon.HybridBlock): - def __init__(self, **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - self.conv0 = nn.Conv2D(64, (3, 3)) - self.conv1 = nn.Conv2D(256, (3, 3)) - - def hybrid_forward(self, F, x): - x_reshape = x.reshape((0, 0, 448, 112)) - y = self.conv0(x_reshape) - y_reshape = y.reshape((0, 0, 223, 220)) - out = self.conv1(y_reshape) - return out - x = mx.nd.random.uniform(shape=(32, 3, 224, 224)) - net = Net() - check_layer_forward_withinput(net, x) - - -@with_seed() -def test_slice_conv(): - class Net(gluon.HybridBlock): - def __init__(self, **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - self.conv0 = nn.Conv2D(64, (3, 3)) - - def hybrid_forward(self, F, x): - x_slice = x.slice(begin=(0, 2, 0, 0), end=(32, 5, 224, 224)) - out = self.conv0(x_slice) - return out - x = mx.nd.random.uniform(shape=(32, 6, 224, 224)) - net = Net() - check_layer_forward_withinput(net, x) - - -@with_seed() -def test_slice_conv_slice_conv(): - class Net(gluon.HybridBlock): - def __init__(self, **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - self.conv0 = nn.Conv2D(64, (3, 3)) - self.conv1 = nn.Conv2D(256, (3, 3)) - - def hybrid_forward(self, F, x): - x_slice = x.slice(begin=(0, 2, 0, 0), end=(32, 5, 224, 224)) - y = self.conv0(x_slice) - y_slice = y.slice(begin=(0, 32, 0, 0), end=(32, 64, 222, 222)) - out = self.conv1(y_slice) - return out - x = mx.nd.random.uniform(shape=(32, 6, 224, 224)) - net = Net() - check_layer_forward_withinput(net, x) - - -@with_seed() -@unittest.skip('skippping temporarily, tracked by 
https://github.com/apache/incubator-mxnet/issues/11164') -def test_slice_conv_reshape_conv(): - class Net(gluon.HybridBlock): - def __init__(self, **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - self.conv0 = nn.Conv2D(64, (3, 3)) - self.conv1 = nn.Conv2D(256, (3, 3)) - - def hybrid_forward(self, F, x): - x_slice = x.slice(begin=(0, 0, 1, 1), end=(32, 3, 225, 225)) - y = self.conv0(x_slice) - y_reshape = y.reshape((0, 0, 444, 111)) - out = self.conv1(y_reshape) - return out - - x = mx.nd.random.uniform(shape=(32, 3, 299, 299)) - net = Net() - check_layer_forward_withinput(net, x) - -@with_seed() -def test_reshape_conv_slice_conv(): - """ - This test will test gluon Conv2d computation with ndarray reshape and slice - """ - class Net(gluon.HybridBlock): - def __init__(self, **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - self.conv0 = nn.Conv2D(64, (3, 3)) - self.conv1 = nn.Conv2D(256, (3, 3)) - - def hybrid_forward(self, F, x): - x_reshape = x.reshape((0, 0, 448, 112)) - y = self.conv0(x_reshape) - y_slice = y.slice(begin=(0, 32, 0, 0), end=(32, 64, 446, 110)) - out = self.conv1(y_slice) - return out - x = mx.nd.random.uniform(shape=(32, 6, 224, 224)) - net = Net() - check_layer_forward_withinput(net, x) - - -@with_seed() -def test_reshape_dense(): - class Net(gluon.HybridBlock): - def __init__(self, **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - channel0 = np.random.randint(1, 129) - self.dense0 = nn.Dense(channel0) - - def hybrid_forward(self, F, x): - x_reshape = x.reshape((8, 64, 600, -1)) - out = self.dense0(x_reshape) - return out - - x = mx.nd.random.uniform(shape=(16, 128, 300, 300)) - net = Net() - check_layer_forward_withinput(net, x) - - -@with_seed() -def test_slice_dense(): - class Net(gluon.HybridBlock): - def __init__(self, slice, **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - channel0 = np.random.randint(1, 129) - self.dense0 = nn.Dense(channel0) - self.slice = slice - - def hybrid_forward(self, F, x): - x_slice = x.slice(begin=tuple(self.slice[0]), - end=tuple(self.slice[1])) - out = self.dense0(x_slice) - return out - - x = mx.nd.random.uniform(shape=(16, 128, 300, 300)) - slice = [[0, 64, 50, 0], [8, 128, 300, 300]] - net = Net(slice) - check_layer_forward_withinput(net, x) - -@with_seed() -def test_slice_dense_slice_dense(): - class Net(gluon.HybridBlock): - def __init__(self, slice, **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - channel0 = 50 - channel1 = np.random.randint(1, 129) - self.dense0 = nn.Dense(channel0) - self.dense1 = nn.Dense(channel1) - self.slice = slice - - def hybrid_forward(self, F, x): - x_slice = x.slice(begin=tuple(self.slice[0]), end=tuple(self.slice[1])) - y = self.dense0(x_slice) - y_slice = y.slice(begin=(4, 0), end=(-1, 10)) - out = self.dense1(y_slice) - return out - - x = mx.nd.random.uniform(shape=(16, 128, 300, 300)) - slice = [[0, 64, 50, 0], [8, 128, 300, 300]] - net = Net(slice) - check_layer_forward_withinput(net, x) - -@with_seed() -def test_reshape_dense_reshape_dense(): - class Net(gluon.HybridBlock): - def __init__(self, **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - channel0 = np.random.randint(1, 129) - channel1 = np.random.randint(1, 129) - self.dense0 = nn.Dense(channel0) - self.dense1 = nn.Dense(channel1) - - def hybrid_forward(self, F, x): - x_reshape = x.reshape((8, 64, 600, -1)) - y = self.dense0(x_reshape) - y_reshape = y.reshape((1, -1)) - out = 
self.dense1(y_reshape) - return out - - x = mx.nd.random.uniform(shape=(16, 128, 300, 300)) - net = Net() - check_layer_forward_withinput(net, x) - - -@with_seed() -def test_slice_dense_reshape_dense(): - class Net(gluon.HybridBlock): - def __init__(self, slice, **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - channel0 = np.random.randint(1, 129) - channel1 = np.random.randint(1, 129) - self.dense0 = nn.Dense(channel0) - self.dense1 = nn.Dense(channel1) - self.slice = slice - - def hybrid_forward(self, F, x): - x_slice = x.slice(begin=tuple(self.slice[0]), end=tuple(self.slice[1])) - y = self.dense0(x_slice) - y_reshape = y.reshape((1, -1)) - out = self.dense1(y_reshape) - return out - - x = mx.nd.random.uniform(shape=(16, 128, 300, 300)) - slice = [[0, 64, 50, 0], [8, 128, 300, 300]] - net = Net(slice) - check_layer_forward_withinput(net, x) - - -@with_seed() -@unittest.skip('skippping temporarily, tracked by https://github.com/apache/incubator-mxnet/issues/11164') -def test_reshape_dense_slice_dense(): - class Net(gluon.HybridBlock): - def __init__(self, **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - channel0 = 128 - channel1 = np.random.randint(1, 129) - self.dense0 = nn.Dense(channel0) - self.dense1 = nn.Dense(channel1) - - def hybrid_forward(self, F, x): - x_reshape = x.reshape((8, 64, 600, -1)) - y = self.dense0(x_reshape) - y_slice = y.slice(begin=(0, 64), end=(8, 128)) - out = self.dense1(y_slice) - return out - - x = mx.nd.random.uniform(shape=(16, 128, 300, 300)) - net = Net() - check_layer_forward_withinput(net, x) - - -@with_seed() -@unittest.skip('skippping temporarily, tracked by https://github.com/apache/incubator-mxnet/issues/11164') -def test_reshape_batchnorm(): - class Net(gluon.HybridBlock): - def __init__(self, shape, **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - self.conv0 = nn.Conv2D(128, (1, 1)) - self.bn0 = nn.BatchNorm() - self.reshape = shape - - def hybrid_forward(self, F, x): - x_in = self.conv0(x) - x_reshape = x_in.reshape(self.reshape) - out = self.bn0(x_reshape) - return out - - x = mx.nd.random.uniform(shape=(16, 128, 256, 256)) - shape = (32, 512, 128, -1) - net = Net(shape) - check_layer_forward_withinput(net, x) - - -@with_seed() -def test_slice_batchnorm(): - class Net(gluon.HybridBlock): - def __init__(self, slice, **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - self.conv0 = nn.Conv2D(128, (1, 1)) - self.bn0 = nn.BatchNorm(3) - self.slice = slice - - def hybrid_forward(self, F, x): - x_in = self.conv0(x) - x_slice = x_in.slice(begin=tuple(self.slice[0]), - end=tuple(self.slice[1])) - out = self.bn0(x_slice) - return out - - x = mx.nd.random.uniform(shape=(16, 128, 256, 256)) - slice = [[0, 64, 50, 0], [8, 128, 256, 256]] - net = Net(slice) - check_layer_forward_withinput(net, x) - - -@with_seed() -def test_slice_batchnorm_slice_batchnorm(): - class Net(gluon.HybridBlock): - def __init__(self, slice, **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - self.conv0 = nn.Conv2D(128, (1, 1)) - self.bn0 = nn.BatchNorm(3) - self.bn1 = nn.BatchNorm(1) - self.slice = slice - - def hybrid_forward(self, F, x): - x_in = self.conv0(x) - x_slice = x_in.slice(begin=tuple(self.slice[0][0]), end=tuple(self.slice[0][1])) - y = self.bn0(x_slice) - y_slice = y.slice(begin=tuple(self.slice[1][0]), end=tuple(self.slice[1][1])) - out = self.bn1(y_slice) - return out - - x = mx.nd.random.uniform(shape=(16, 128, 256, 256)) - slice = 
[[[0, 64, 50, 0], [8, 128, 200, 256]], [[4, 50, 0, 128], [7, -1, -1, -1]]] - net = Net(slice) - check_layer_forward_withinput(net, x) - - -@with_seed() -def test_reshape_batchnorm_reshape_batchnorm(): - class Net(gluon.HybridBlock): - def __init__(self, shape, **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - self.conv0 = nn.Conv2D(128, (1, 1)) - self.bn0 = nn.BatchNorm(0) - self.bn1 = nn.BatchNorm(2) - self.reshape = shape - - def hybrid_forward(self, F, x): - x_in = self.conv0(x) - x_reshape = x_in.reshape(self.reshape[0]) - y = self.bn0(x_reshape) - y_reshape = y.reshape(self.reshape[1]) - out = self.bn1(y_reshape) - return out - - x = mx.nd.random.uniform(shape=(16, 128, 256, 512)) - shape = [(8, 256, 128, -1), (32, 128, 512, -1)] - net = Net(shape) - check_layer_forward_withinput(net, x) - - -@with_seed() -def test_slice_batchnorm_reshape_batchnorm(): - class Net(gluon.HybridBlock): - def __init__(self, shape, slice, **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - self.conv0 = nn.Conv2D(128, (1, 1)) - self.bn0 = nn.BatchNorm(0) - self.bn1 = nn.BatchNorm(2) - self.reshape = shape - self.slice = slice - - def hybrid_forward(self, F, x): - x_in = self.conv0(x) - x_slice = x_in.slice(begin=tuple(self.slice[0]), end=tuple(self.slice[1])) - y = self.bn0(x_slice) - y_reshape = y.reshape(self.reshape) - out = self.bn1(y_reshape) - return out - - x = mx.nd.random.uniform(shape=(16, 128, 256, 256)) - slice = [[0, 64, 50, 0], [8, 128, 200, 256]] - shape = (1, 128, 256, -1) - net = Net(shape, slice) - check_layer_forward_withinput(net, x) - - -@with_seed() -def test_reshape_batchnorm_slice_batchnorm(): - class Net(gluon.HybridBlock): - def __init__(self, shape, slice, **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - self.conv0 = nn.Conv2D(128, (1, 1)) - self.bn0 = nn.BatchNorm(2) - self.bn1 = nn.BatchNorm(0) - self.reshape = shape - self.slice = slice - - def hybrid_forward(self, F, x): - x_in = self.conv0(x) - x_reshape = x_in.reshape(self.reshape) - y = self.bn0(x_reshape) - y_slice = y.slice(begin=tuple(self.slice[0]), end=tuple(self.slice[1])) - out = self.bn1(y_slice) - return out - - x = mx.nd.random.uniform(shape=(16, 128, 256, 256)) - slice = [[0, 0, 50, 0], [8, 1, -1, 100]] - shape = (128, 1, 256, -1) - net = Net(shape, slice) - check_layer_forward_withinput(net, x) - -@with_seed() -def test_reshape_pooling2d(): - max_pooling = nn.MaxPool2D(strides=(2, 3), padding=(1, 1)) - avg_pooling = nn.AvgPool2D(strides=(2, 2), padding=(1, 1)) - global_maxpooling = nn.GlobalMaxPool2D() - global_avgpooling = nn.GlobalAvgPool2D() - pooling_layers = [max_pooling, avg_pooling, global_maxpooling, global_avgpooling] - class Net(gluon.HybridBlock): - def __init__(self, - shape, - pooling_layer, - **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - self.reshape = shape - self.pool0 = pooling_layer - - def hybrid_forward(self, F, x): - x_reshape = x.reshape(self.reshape) - out = self.pool0(x_reshape) - return out - - x = mx.nd.random.uniform(shape=(16, 128, 256, 256)) - shape = (128, 256, 256, -1) - for i in range(len(pooling_layers)): - net = Net(shape, pooling_layers[i]) - check_layer_forward_withinput(net, x) - -@with_seed() -def test_slice_pooling2d(): - max_pooling = nn.MaxPool2D(strides=(2, 3), padding=(1, 1)) - avg_pooling = nn.AvgPool2D(strides=(2, 2), padding=(1, 1)) - global_maxpooling = nn.GlobalMaxPool2D() - global_avgpooling = nn.GlobalAvgPool2D() - pooling_layers = [max_pooling, 
avg_pooling, global_maxpooling, global_avgpooling] - class Net(gluon.HybridBlock): - def __init__(self, - slice, - pooling_layer, - **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - self.slice = slice - self.pool0 = pooling_layer - - def hybrid_forward(self, F, x): - x_slice = x.slice(begin=self.slice[0], end=self.slice[1]) - out = self.pool0(x_slice) - return out - - x = mx.nd.random.uniform(shape=(16, 128, 256, 256)) - slice = [(12, 0, 128, 64), (16, 16, 256, 256)] - for i in range(len(pooling_layers)): - net = Net(slice, pooling_layers[i]) - check_layer_forward_withinput(net, x) - -@with_seed() -def test_reshape_pooling2d_reshape_pooling2d(): - max_pooling = nn.MaxPool2D(strides=(2, 2), padding=(1, 1)) - avg_pooling = nn.AvgPool2D(strides=(2, 2), padding=(1, 1)) - global_maxpooling = nn.GlobalMaxPool2D() - global_avgpooling = nn.GlobalAvgPool2D() - pooling_layers = [max_pooling, avg_pooling, global_maxpooling, global_avgpooling] - class Net(gluon.HybridBlock): - def __init__(self, - shape, - pooling_layer1, - pooling_layer2, - **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - self.reshape = shape - self.pool0 = pooling_layer1 - self.pool1 = pooling_layer2 - - def hybrid_forward(self, F, x): - x_reshape = x.reshape(self.reshape[0]) - y = self.pool0(x_reshape) - y_reshape = y.reshape(self.reshape[1]) - out = self.pool1(y_reshape) - return out - - x = mx.nd.random.uniform(shape=(16, 128, 256, 256)) - shape = [(128, 256, 64, -1), (128, 256, 11, -1)] - for i in range(len(pooling_layers)): - for j in range(len(pooling_layers)): - if isinstance(pooling_layers[i], (nn.GlobalMaxPool2D, nn.GlobalAvgPool2D)): - shape[1] = (256, 128, 1, 1) - net = Net(shape, pooling_layers[i], pooling_layers[j]) - check_layer_forward_withinput(net, x) - -@with_seed() -def test_slice_pooling2d_slice_pooling2d(): - max_pooling = nn.MaxPool2D(strides=(2, 3), padding=(1, 1)) - avg_pooling = nn.AvgPool2D(strides=(2, 2), padding=(1, 1)) - global_maxpooling = nn.GlobalMaxPool2D() - global_avgpooling = nn.GlobalAvgPool2D() - pooling_layers = [max_pooling, avg_pooling, global_maxpooling, global_avgpooling] - class Net(gluon.HybridBlock): - def __init__(self, - slice, - pooling_layer1, - pooling_layer2, - **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - self.slice = slice - self.pool0 = pooling_layer1 - self.pool1 = pooling_layer2 - - def hybrid_forward(self, F, x): - x_slice = x.slice(begin=self.slice[0][0], end=self.slice[0][1]) - y = self.pool0(x_slice) - y_slice = y.slice(begin=self.slice[1][0], end=self.slice[1][1]) - out = self.pool1(y_slice) - return out - - x = mx.nd.random.uniform(shape=(16, 128, 256, 256)) - slice = [[(8, 0, 100, 50), (16, -1, -1, -1)], [(0, 64, 0, 50), (2, -1, -1, -1)]] - for i in range(len(pooling_layers)): - for j in range(len(pooling_layers)): - if isinstance(pooling_layers[i], (nn.GlobalMaxPool2D, nn.GlobalAvgPool2D)): - slice[1] = [(0, 64, 0, 0), (2, -1, 1, 1)] - net = Net(slice, pooling_layers[i], pooling_layers[j]) - check_layer_forward_withinput(net, x) - -@with_seed() -def test_slice_pooling2d_reshape_pooling2d(): - max_pooling = nn.MaxPool2D(strides=(2, 3), padding=(1, 1)) - avg_pooling = nn.AvgPool2D(strides=(2, 2), padding=(1, 1)) - global_maxpooling = nn.GlobalMaxPool2D() - global_avgpooling = nn.GlobalAvgPool2D() - pooling_layers = [max_pooling, avg_pooling, global_maxpooling, global_avgpooling] - class Net(gluon.HybridBlock): - def __init__(self, - shape, - slice, - pooling_layer1, - pooling_layer2, - 
**kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - self.reshape = shape - self.slice = slice - self.pool0 = pooling_layer1 - self.pool1 = pooling_layer2 - - def hybrid_forward(self, F, x): - x_slice = x.slice(begin=self.slice[0], end=self.slice[1]) - y = self.pool0(x_slice) - y_reshape = y.reshape(self.reshape) - out = self.pool1(y_reshape) - return out - - x = mx.nd.random.uniform(shape=(16, 128, 256, 256)) - slice = [(8, 0, 100, 50), (16, 128, 256, 256)] - shape = (32, -1, 0, 0) - for i in range(len(pooling_layers)): - for j in range(len(pooling_layers)): - net = Net(shape, slice, pooling_layers[i], pooling_layers[j]) - check_layer_forward_withinput(net, x) - -@with_seed() -def test_reshape_pooling2d_slice_pooling2d(): - max_pooling = nn.MaxPool2D(strides=(2, 3), padding=(1, 1)) - avg_pooling = nn.AvgPool2D(strides=(2, 2), padding=(1, 1)) - global_maxpooling = nn.GlobalMaxPool2D() - global_avgpooling = nn.GlobalAvgPool2D() - pooling_layers = [max_pooling, avg_pooling, global_maxpooling, global_avgpooling] - class Net(gluon.HybridBlock): - def __init__(self, - shape, - slice, - pooling_layer1, - pooling_layer2, - **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - self.reshape = shape - self.slice = slice - self.pool0 = pooling_layer1 - self.pool1 = pooling_layer2 - - def hybrid_forward(self, F, x): - x_reshape = x.reshape(self.reshape) - y = self.pool0(x_reshape) - y_slice = y.slice(begin=self.slice[0], end=self.slice[1]) - out = self.pool1(y_slice) - return out - - x = mx.nd.random.uniform(shape=(16, 128, 256, 256)) - shape = (0, 512, 64, -1) - slice = [(8, 256, 10, 20), (-1, -1, -1, 70)] - for i in range(len(pooling_layers)): - for j in range(len(pooling_layers)): - if isinstance(pooling_layers[i], (nn.GlobalMaxPool2D, nn.GlobalAvgPool2D)): - slice = [(8, 256, 0, 0), (-1, -1, 1, 1)] - net = Net(shape, slice, pooling_layers[i], pooling_layers[j]) - check_layer_forward_withinput(net, x) - -@with_seed() -@unittest.skip('skippping temporarily, tracked by https://github.com/apache/incubator-mxnet/issues/11164') -def test_reshape_deconv(): - class Net(gluon.HybridBlock): - def __init__(self, shape, **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - self.reshape = shape - self.conv0 = nn.Conv2DTranspose(64, (3, 3)) - - def hybrid_forward(self, F, x): - x_reshape = x.reshape(self.reshape) - out = self.conv0(x_reshape) - return out - x = mx.nd.random.uniform(shape=(64, 2, 256, 256)) - shape = (8, 16, 64, -1) - net = Net(shape) - check_layer_forward_withinput(net, x) - -@with_seed() -@unittest.skip('skippping temporarily, tracked by https://github.com/apache/incubator-mxnet/issues/11164') -def test_slice_deconv(): - class Net(gluon.HybridBlock): - def __init__(self, slice, **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - self.slice = slice - self.conv0 = nn.Conv2DTranspose(64, (3, 3)) - - def hybrid_forward(self, F, x): - x_slice = x.slice(begin=self.slice[0], end=self.slice[1]) - out = self.conv0(x_slice) - return out - x = mx.nd.random.uniform(shape=(128, 32, 500, 500)) - slice = [(0, 16, 0, 0), (1, 32, 256, 256)] - net = Net(slice) - check_layer_forward_withinput(net, x) - -@with_seed() -@unittest.skip('skippping temporarily, tracked by https://github.com/apache/incubator-mxnet/issues/11164') -def test_reshape_deconv_reshape_deconv(): - class Net(gluon.HybridBlock): - def __init__(self, shape, **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - self.reshape = shape 
- self.conv0 = nn.Conv2DTranspose(64, (3, 3)) - self.conv1 = nn.Conv2DTranspose(128, (2, 3), strides=(2, 2)) - - def hybrid_forward(self, F, x): - x_reshape = x.reshape(self.reshape[0]) - y = self.conv0(x_reshape) - y_reshape = y.reshape(self.reshape[1]) - out = self.conv1(y_reshape) - return out - x = mx.nd.random.uniform(shape=(16, 32, 256, 512)) - shape = [(32, 0, 256, -1), (64, 32, 129, -1)] - net = Net(shape) - check_layer_forward_withinput(net, x) - -@with_seed() -@unittest.skip('skippping temporarily, tracked by https://github.com/apache/incubator-mxnet/issues/11164') -def test_slice_deconv_slice_deconv(): - class Net(gluon.HybridBlock): - def __init__(self, slice, **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - self.slice = slice - self.conv0 = nn.Conv2DTranspose(64, (3, 3)) - self.conv1 = nn.Conv2DTranspose(128, (2, 3), strides=(2, 2)) - - def hybrid_forward(self, F, x): - x_slice = x.slice(begin=self.slice[0][0], end=self.slice[0][1]) - y = self.conv0(x_slice) - y_slice = y.slice(begin=self.slice[1][0], end=self.slice[1][1]) - out = self.conv1(y_slice) - return out - x = mx.nd.random.uniform(shape=(128, 32, 500, 500)) - slice = [[(0, 16, 0, 0), (8, 32, 128, 128)], [(4, 0, 2, 0), (8, 32, 130, 128)]] - net = Net(slice) - check_layer_forward_withinput(net, x) - -@with_seed() -@unittest.skip('skippping temporarily, tracked by https://github.com/apache/incubator-mxnet/issues/11164') -def test_reshape_deconv_slice_deconv(): - class Net(gluon.HybridBlock): - def __init__(self, shape, slice, **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - self.reshape = shape - self.slice = slice - self.conv0 = nn.Conv2DTranspose(64, (3, 3)) - self.conv1 = nn.Conv2DTranspose(128, (2, 3), strides=(2, 2)) - - def hybrid_forward(self, F, x): - x_reshape = x.reshape(self.reshape) - y = self.conv0(x_reshape) - y_slice = y.slice(begin=self.slice[0], end=self.slice[1]) - out = self.conv1(y_slice) - return out - x = mx.nd.random.uniform(shape=(16, 4, 500, 500)) - shape = (32, 16, 125, -1) - slice = [(4, 32, 0, 0), (20, 64, 64, 224)] - net = Net(shape, slice) - check_layer_forward_withinput(net, x) - -@with_seed() -@unittest.skip('skippping temporarily, tracked by https://github.com/apache/incubator-mxnet/issues/11164') -def test_slice_deconv_reshape_deconv(): - class Net(gluon.HybridBlock): - def __init__(self, shape, slice, **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - self.reshape = shape - self.slice = slice - self.conv0 = nn.Conv2DTranspose(64, (3, 3)) - self.conv1 = nn.Conv2DTranspose(128, (2, 3), strides=(2, 2)) - - def hybrid_forward(self, F, x): - x_slice = x.slice(begin=self.slice[0], end=self.slice[1]) - y = self.conv0(x_slice) - y_reshape = y.reshape(self.reshape) - out = self.conv1(y_reshape) - return out - x = mx.nd.random.uniform(shape=(16, 32, 256, 512)) - shape = (24, 16, 452, -1) - slice = [(4, 0, 0, 0), (16, 32, 224, 224)] - net = Net(shape, slice) - check_layer_forward_withinput(net, x) - -@with_seed() -def test_reshape_activation(): - class Net(gluon.HybridBlock): - def __init__(self, act, shape, **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - self.reshape = shape - self.act = nn.Activation(act) - - def hybrid_forward(self, F, x): - x_reshape = x.reshape(self.reshape) - out = self.act(x_reshape) - return out - acts = ["relu", "sigmoid", "tanh", "softrelu"] - for act in acts: - x = mx.nd.random.uniform(-1, 1, shape=(16, 32, 256, 512)) - shape = (64, 8, 128, -1) - net = Net(act, 
shape) - check_layer_forward_withinput(net, x) - - -@with_seed() -def test_slice_activation(): - class Net(gluon.HybridBlock): - def __init__(self, act, slice, **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - self.slice = slice - self.act = nn.Activation(act) - - def hybrid_forward(self, F, x): - x_slice = x.slice(begin=self.slice[0], end=self.slice[1]) - out = self.act(x_slice) - return out - - acts = ["relu", "sigmoid", "tanh", "softrelu"] - for act in acts: - x = mx.nd.random.uniform(-1, 1, shape=(16, 32, 256, 512)) - slice = [(8, 16, 0, 0), (16, 32, 100, 100)] - net = Net(act, slice) - check_layer_forward_withinput(net, x) - - -@with_seed() -def test_reshape_activation_reshape_activation(): - class Net(gluon.HybridBlock): - def __init__(self, act0, act1, shape, **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - self.reshape = shape - self.act0 = nn.Activation(act0) - self.act1 = nn.Activation(act1) - - def hybrid_forward(self, F, x): - x_reshape = x.reshape(self.reshape[0]) - y = self.act0(x_reshape) - y_reshape = y.reshape(self.reshape[1]) - out = self.act1(y_reshape) - return out - acts = ["relu", "sigmoid", "tanh", "softrelu"] - for idx0, act0 in enumerate(acts): - for idx1, act1 in enumerate(acts): - if idx1 == idx0: - continue - x = mx.nd.random.uniform(-1, 1, shape=(16, 32, 256, 512)) - shape = [(64, 8, 128, -1), (16, 64, 128, -1)] - net = Net(act0, act1, shape) - check_layer_forward_withinput(net, x) - - -@with_seed() -def test_slice_activation_slice_activation(): - class Net(gluon.HybridBlock): - def __init__(self, act0, act1, slice, **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - self.slice = slice - self.act0 = nn.Activation(act0) - self.act1 = nn.Activation(act1) - - def hybrid_forward(self, F, x): - x_slice = x.slice(begin=self.slice[0][0], end=self.slice[0][1]) - y = self.act0(x_slice) - y_slice = y.slice(begin=self.slice[1][0], end=self.slice[1][1]) - out = self.act1(y_slice) - return out - acts = ["relu", "sigmoid", "tanh", "softrelu"] - for idx0, act0 in enumerate(acts): - for idx1, act1 in enumerate(acts): - if idx1 == idx0: - continue - x = mx.nd.random.uniform(-1, 1, shape=(16, 32, 256, 512)) - slice = [[(0, 0, 100, 100), (8, 16, 256, 512)], [(2, 4, 0, 0), (8, 10, 128, 128)]] - net = Net(act0, act1, slice) - check_layer_forward_withinput(net, x) - - -@with_seed() -def test_reshape_activation_slice_activation(): - class Net(gluon.HybridBlock): - def __init__(self, act0, act1, shape, slice, **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - self.reshape = shape - self.slice = slice - self.act0 = nn.Activation(act0) - self.act1 = nn.Activation(act1) - - def hybrid_forward(self, F, x): - x_reshape = x.reshape(self.reshape) - y = self.act0(x_reshape) - y_slice = y.slice(begin=self.slice[0], end=self.slice[1]) - out = self.act1(y_slice) - return out - acts = ["relu", "sigmoid", "tanh", "softrelu"] - for idx0, act0 in enumerate(acts): - for idx1, act1 in enumerate(acts): - if idx1 == idx0: - continue - x = mx.nd.random.uniform(-1, 1, shape=(16, 32, 256, 512)) - shape = (64, 16, 128, -1) - slice = [(0, 0, 0, 100), (8, 16, 64, 228)] - net = Net(act0, act1, shape, slice) - check_layer_forward_withinput(net, x) - - -@with_seed() -def test_slice_activation_reshape_activation(): - class Net(gluon.HybridBlock): - def __init__(self, act0, act1, shape, slice, **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - self.reshape = shape - self.slice = slice 
- self.act0 = nn.Activation(act0) - self.act1 = nn.Activation(act1) - - def hybrid_forward(self, F, x): - x_slice = x.slice(begin=self.slice[0], end=self.slice[1]) - y = self.act0(x_slice) - y_reshape = y.reshape(self.reshape) - out = self.act1(y_reshape) - return out - acts = ["relu", "sigmoid", "tanh", "softrelu"] - for idx0, act0 in enumerate(acts): - for idx1, act1 in enumerate(acts): - if idx1 == idx0: - continue - x = mx.nd.random.uniform(-1, 1, shape=(16, 32, 256, 512)) - slice = [(0, 0, 0, 100), (8, 16, 64, 228)] - shape = (64, 16, 64, -1) - net = Net(act0, act1, shape, slice) - check_layer_forward_withinput(net, x) - if __name__ == '__main__': import nose nose.runmodule() From df0c6e64fd3063630882e15efe728613e0314632 Mon Sep 17 00:00:00 2001 From: Shufan Date: Fri, 6 Jul 2018 11:00:23 +0800 Subject: [PATCH 05/12] Modify the cases trying to eliminate the errors on GPU --- tests/python/unittest/test_gluon.py | 1059 ++++++++++++++++++++++++++- 1 file changed, 1046 insertions(+), 13 deletions(-) diff --git a/tests/python/unittest/test_gluon.py b/tests/python/unittest/test_gluon.py index 67e8b9ea2d3f..bc4200f3fe63 100644 --- a/tests/python/unittest/test_gluon.py +++ b/tests/python/unittest/test_gluon.py @@ -1413,20 +1413,1053 @@ def hybrid_forward(self, F, in1): assert_almost_equal(out_grad.asnumpy(), d1.grad.asnumpy()) assert_almost_equal(out_grad.asnumpy(), d2.grad.asnumpy()) +def check_layer_forward_withinput(net, x): + x_hybrid = x.copy() + x.attach_grad() + x_hybrid.attach_grad() + net.collect_params().initialize() + with mx.autograd.record(): + out1 = net(x) + out1.backward() + net.hybridize() + with mx.autograd.record(): + out2 = net(x_hybrid) + out2.backward() + mx.test_utils.assert_almost_equal(x.grad.asnumpy(), x_hybrid.grad.asnumpy(), rtol=1e-5, atol=1e-6) + mx.test_utils.assert_almost_equal(out1.asnumpy(), out2.asnumpy(), rtol=1e-5, atol=1e-6) + +@with_seed() +def test_conv2d_16c(): + chn_list = [16, 256] + kernel_list = [1, 3] + #kernel_list.append(224) + batch_size = 32 + class Net(gluon.HybridBlock): + def __init__(self, + chn_num, + kernel, + **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + self.conv0 = gluon.nn.Conv2D(chn_num, (kernel, kernel)) + + def hybrid_forward(self, F, x): + out = self.conv0(x) + return out + + x = mx.nd.random.uniform(-1.0, 1.0, shape=(batch_size, 3, 224, 224)) + for i in range(len(chn_list)): + for j in range(len(kernel_list)): + net = Net(chn_list[i], kernel_list[j]) + check_layer_forward_withinput(net, x) + + +@with_seed() +def test_group_conv2d_16c(): + grp_list = [16] + input_size_list = np.random.randint(low=3, high=65, size=10).tolist() + kernel_list = [1, 3] + batch_size = 32 + class Net(gluon.HybridBlock): + def __init__(self, + chn_num, + kernel, + **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + self.conv0 = gluon.nn.Conv2D(chn_num, (1, 1)) + self.conv1 = gluon.nn.Conv2D(chn_num, (kernel, kernel), groups=chn_num) + + def hybrid_forward(self, F, x): + y = self.conv0(x) + out = self.conv1(y) + return out + + for i in range(len(input_size_list)): + x = mx.nd.random.uniform(-1.0, 1.0, shape=(batch_size, 3, input_size_list[i], input_size_list[i])) + for j in range(len(grp_list)): + for k in range(len(kernel_list)): + net = Net(grp_list[j], kernel_list[k]) + check_layer_forward_withinput(net, x) + + +@with_seed() +@unittest.skip('skippping temporarily, tracked by https://github.com/apache/incubator-mxnet/issues/11164') +def test_deconv2d_16c(): + in_chn_list = [1024, 512, 256, 128, 
64, 32, 16] + out_chn_list = [512, 256, 128, 64, 32, 16, 3] + kernel_list = [1, 3, 5, 7] + in_shape = [4, 8, 16, 32, 64, 224] + batch_size = 32 + class Net(gluon.HybridBlock): + def __init__(self, chn_num, kernel, **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + self.deconv0 = gluon.nn.Conv2DTranspose(chn_num, (kernel, kernel)) + + def hybrid_forward(self, F, x): + out = self.deconv0(x) + return out + for i in range(len(in_shape)): + x = mx.nd.random.uniform(-1.0, 1.0, shape=(batch_size, in_chn_list[i], in_shape[i], in_shape[i])) + for j in range(len(kernel_list)): + net = Net(out_chn_list[i], kernel_list[j]) + check_layer_forward_withinput(net, x) + + +@with_seed() +@unittest.skip('skippping temporarily, tracked by https://github.com/apache/incubator-mxnet/issues/11164') +def test_batchnorm_16c(): + chn_list = [16, 1024] + shape = np.random.randint(low=1, high=300, size=10) + shape_list = [] + for i in range(len(shape)): + shape_list.append((shape[i], shape[i])) + batch_size = 32 + class Net(gluon.HybridBlock): + def __init__(self, + chn_num, + kernel, + axis, + **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + self.conv0 = gluon.nn.Conv2D(chn_num, (kernel, kernel)) + self.bn0 = gluon.nn.BatchNorm(axis=axis) + + def hybrid_forward(self, F, x): + conv = self.conv0(x) + out = self.bn0(conv) + return out + + for i in range(len(chn_list)): + for j in range(len(shape_list)): + shape = (batch_size, ) + (3,) + shape_list[j] + x = mx.nd.random.uniform(-1.0, 1.0, shape=shape) + net = Net(chn_list[i], 1, 1) + check_layer_forward_withinput(net, x) + + +@with_seed() +def test_concat(): + chn_list = [16, 1024] + shapes = [64, 32, 3] + input_num = np.random.randint(low=2, high=11) + shape_list = [] + for i in range(len(shapes)): + shape_list.append((shapes[i], shapes[i])) + batch_size = 32 + class Net(gluon.HybridBlock): + def __init__(self, + check_dim, + input_num, + chn_num, + kernel, + **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + from mxnet.gluon.contrib.nn import HybridConcurrent + self.concat = HybridConcurrent(axis=check_dim) + for i in range(input_num): + self.concat.add(gluon.nn.Conv2D(chn_num, (kernel, kernel))) + + def hybrid_forward(self, F, x): + return self.concat(x) + + for i in range(len(chn_list)): + shape = (batch_size,) + (3,) + shape_list[i] + x = mx.nd.random.uniform(-1.0, 1.0, shape=shape) + for axis in range(4): + net = Net(axis, input_num, chn_list[i], 1) + check_layer_forward_withinput(net, x) + +@with_seed() +def test_reshape_conv(): + class Net(gluon.HybridBlock): + def __init__(self, **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + self.conv0 = nn.Conv2D(64, (3, 3)) + + def hybrid_forward(self, F, x): + x_reshape = x.reshape((0, 0, 448, 112)) + out = self.conv0(x_reshape) + return out + x = mx.nd.random.uniform(shape=(32, 3, 224, 224)) + net = Net() + check_layer_forward_withinput(net, x) + + +@with_seed() +@unittest.skip('skippping temporarily, tracked by https://github.com/apache/incubator-mxnet/issues/11164') +def test_reshape_conv_reshape_conv(): + class Net(gluon.HybridBlock): + def __init__(self, **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + self.conv0 = nn.Conv2D(64, (3, 3)) + self.conv1 = nn.Conv2D(256, (3, 3)) + + def hybrid_forward(self, F, x): + x_reshape = x.reshape((0, 0, 448, 112)) + y = self.conv0(x_reshape) + y_reshape = y.reshape((0, 0, 223, 220)) + out = self.conv1(y_reshape) + return out + x = 
mx.nd.random.uniform(shape=(32, 3, 224, 224)) + net = Net() + check_layer_forward_withinput(net, x) + + +@with_seed() +def test_slice_conv(): + class Net(gluon.HybridBlock): + def __init__(self, **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + self.conv0 = nn.Conv2D(64, (3, 3)) + + def hybrid_forward(self, F, x): + x_slice = x.slice(begin=(0, 2, 0, 0), end=(32, 5, 224, 224)) + out = self.conv0(x_slice) + return out + x = mx.nd.random.uniform(shape=(32, 6, 224, 224)) + net = Net() + check_layer_forward_withinput(net, x) + + +@with_seed() +def test_slice_conv_slice_conv(): + class Net(gluon.HybridBlock): + def __init__(self, **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + self.conv0 = nn.Conv2D(64, (3, 3)) + self.conv1 = nn.Conv2D(256, (3, 3)) + + def hybrid_forward(self, F, x): + x_slice = x.slice(begin=(0, 2, 0, 0), end=(32, 5, 224, 224)) + y = self.conv0(x_slice) + y_slice = y.slice(begin=(0, 32, 0, 0), end=(32, 64, 222, 222)) + out = self.conv1(y_slice) + return out + x = mx.nd.random.uniform(shape=(32, 6, 224, 224)) + net = Net() + check_layer_forward_withinput(net, x) + + +@with_seed() +@unittest.skip('skippping temporarily, tracked by https://github.com/apache/incubator-mxnet/issues/11164') +def test_slice_conv_reshape_conv(): + class Net(gluon.HybridBlock): + def __init__(self, **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + self.conv0 = nn.Conv2D(64, (3, 3)) + self.conv1 = nn.Conv2D(256, (3, 3)) + + def hybrid_forward(self, F, x): + x_slice = x.slice(begin=(0, 0, 1, 1), end=(32, 3, 225, 225)) + y = self.conv0(x_slice) + y_reshape = y.reshape((0, 0, 444, 111)) + out = self.conv1(y_reshape) + return out + + x = mx.nd.random.uniform(shape=(32, 3, 299, 299)) + net = Net() + check_layer_forward_withinput(net, x) + +@with_seed() +def test_reshape_conv_slice_conv(): + """ + This test will test gluon Conv2d computation with ndarray reshape and slice + """ + class Net(gluon.HybridBlock): + def __init__(self, **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + self.conv0 = nn.Conv2D(64, (3, 3)) + self.conv1 = nn.Conv2D(256, (3, 3)) + + def hybrid_forward(self, F, x): + x_reshape = x.reshape((0, 0, 448, 112)) + y = self.conv0(x_reshape) + y_slice = y.slice(begin=(0, 32, 0, 0), end=(32, 64, 446, 110)) + out = self.conv1(y_slice) + return out + x = mx.nd.random.uniform(shape=(32, 6, 224, 224)) + net = Net() + check_layer_forward_withinput(net, x) + + +@with_seed() +def test_reshape_dense(): + class Net(gluon.HybridBlock): + def __init__(self, **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + channel0 = np.random.randint(1, 65) + self.dense0 = nn.Dense(channel0) + + def hybrid_forward(self, F, x): + x_reshape = x.reshape((8, 64, 128, -1)) + out = self.dense0(x_reshape) + return out + + x = mx.nd.random.uniform(shape=(16, 128, 64, 64)) + net = Net() + check_layer_forward_withinput(net, x) + + +@with_seed() +def test_slice_dense(): + class Net(gluon.HybridBlock): + def __init__(self, slice, **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + channel0 = np.random.randint(1, 65) + self.dense0 = nn.Dense(channel0) + self.slice = slice + + def hybrid_forward(self, F, x): + x_slice = x.slice(begin=tuple(self.slice[0]), + end=tuple(self.slice[1])) + out = self.dense0(x_slice) + return out + + x = mx.nd.random.uniform(shape=(16, 128, 64, 64)) + slice = [[0, 64, 50, 0], [8, 128, 64, 64]] + net = Net(slice) + check_layer_forward_withinput(net, x) 
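The slice/Dense tests above rely on a detail that is easy to miss: `NDArray.slice` takes per-axis `begin`/`end` tuples, and `nn.Dense` flattens every axis after the batch axis by default, which is why a 4-D slice can feed a Dense layer directly. A minimal self-contained sketch of that behaviour, assuming MXNet 1.x Gluon (the values below mirror test_slice_dense but are illustrative only, not part of the patch):

    import mxnet as mx
    from mxnet.gluon import nn

    x = mx.nd.random.uniform(shape=(16, 128, 64, 64))
    # begin/end are per-axis bounds: keep rows 0:8, channels 64:128,
    # height 50:64 and the full width, giving shape (8, 64, 14, 64)
    x_slice = x.slice(begin=(0, 64, 50, 0), end=(8, 128, 64, 64))
    dense = nn.Dense(10)   # flatten=True by default
    dense.initialize()
    out = dense(x_slice)
    print(out.shape)       # (8, 10): batch axis kept, trailing axes flattened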
+ +@with_seed() +def test_slice_dense_slice_dense(): + class Net(gluon.HybridBlock): + def __init__(self, slice, **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + channel0 = 50 + channel1 = np.random.randint(1, 65) + self.dense0 = nn.Dense(channel0) + self.dense1 = nn.Dense(channel1) + self.slice = slice + + def hybrid_forward(self, F, x): + x_slice = x.slice(begin=tuple(self.slice[0]), end=tuple(self.slice[1])) + y = self.dense0(x_slice) + y_slice = y.slice(begin=(4, 0), end=(-1, 10)) + out = self.dense1(y_slice) + return out + + x = mx.nd.random.uniform(shape=(16, 128, 64, 64)) + slice = [[0, 64, 50, 0], [8, 128, 64, 64]] + net = Net(slice) + check_layer_forward_withinput(net, x) + +@with_seed() +def test_reshape_dense_reshape_dense(): + class Net(gluon.HybridBlock): + def __init__(self, **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + channel0 = np.random.randint(1, 65) + channel1 = np.random.randint(1, 65) + self.dense0 = nn.Dense(channel0) + self.dense1 = nn.Dense(channel1) + + def hybrid_forward(self, F, x): + x_reshape = x.reshape((8, 64, 128, -1)) + y = self.dense0(x_reshape) + y_reshape = y.reshape((1, -1)) + out = self.dense1(y_reshape) + return out + + x = mx.nd.random.uniform(shape=(16, 128, 64, 64)) + net = Net() + check_layer_forward_withinput(net, x) + + +@with_seed() +def test_slice_dense_reshape_dense(): + class Net(gluon.HybridBlock): + def __init__(self, slice, **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + channel0 = np.random.randint(1, 65) + channel1 = np.random.randint(1, 65) + self.dense0 = nn.Dense(channel0) + self.dense1 = nn.Dense(channel1) + self.slice = slice + + def hybrid_forward(self, F, x): + x_slice = x.slice(begin=tuple(self.slice[0]), end=tuple(self.slice[1])) + y = self.dense0(x_slice) + y_reshape = y.reshape((1, -1)) + out = self.dense1(y_reshape) + return out + + x = mx.nd.random.uniform(shape=(16, 128, 64, 64)) + slice = [[0, 64, 50, 0], [8, 128, 64, 64]] + net = Net(slice) + check_layer_forward_withinput(net, x) + + +@with_seed() + +def test_reshape_dense_slice_dense(): + class Net(gluon.HybridBlock): + def __init__(self, **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + channel0 = 64 + channel1 = np.random.randint(1, 65) + self.dense0 = nn.Dense(channel0) + self.dense1 = nn.Dense(channel1) + + def hybrid_forward(self, F, x): + x_reshape = x.reshape((8, 64, 128, -1)) + y = self.dense0(x_reshape) + y_slice = y.slice(begin=(0, 32), end=(8, 64)) + out = self.dense1(y_slice) + return out + + x = mx.nd.random.uniform(shape=(16, 128, 64, 64)) + net = Net() + check_layer_forward_withinput(net, x) + + +@with_seed() +@unittest.skip('skippping temporarily, tracked by https://github.com/apache/incubator-mxnet/issues/11164') +def test_reshape_batchnorm(): + class Net(gluon.HybridBlock): + def __init__(self, shape, **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + self.conv0 = nn.Conv2D(128, (1, 1)) + self.bn0 = nn.BatchNorm() + self.reshape = shape + + def hybrid_forward(self, F, x): + x_in = self.conv0(x) + x_reshape = x_in.reshape(self.reshape) + out = self.bn0(x_reshape) + return out + + x = mx.nd.random.uniform(shape=(16, 128, 256, 256)) + shape = (32, 512, 128, -1) + net = Net(shape) + check_layer_forward_withinput(net, x) + + +@with_seed() +def test_slice_batchnorm(): + class Net(gluon.HybridBlock): + def __init__(self, slice, **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + self.conv0 = 
nn.Conv2D(128, (1, 1)) + self.bn0 = nn.BatchNorm(3) + self.slice = slice -def test_grad_graph_change(): - class Model(mx.gluon.HybridBlock): - def hybrid_forward(self, F, array, index): - row = array.take(index) - return row, index - array = mx.nd.arange(3) - index = mx.nd.array([2]) - array.attach_grad() - model = Model() - model.hybridize(inline_limit=0) - with mx.autograd.record(train_mode=True): - row, _ = model(array, index) - row.backward() + def hybrid_forward(self, F, x): + x_in = self.conv0(x) + x_slice = x_in.slice(begin=tuple(self.slice[0]), + end=tuple(self.slice[1])) + out = self.bn0(x_slice) + return out + + x = mx.nd.random.uniform(shape=(16, 128, 256, 256)) + slice = [[0, 64, 50, 0], [8, 128, 256, 256]] + net = Net(slice) + check_layer_forward_withinput(net, x) + + +@with_seed() +def test_slice_batchnorm_slice_batchnorm(): + class Net(gluon.HybridBlock): + def __init__(self, slice, **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + self.conv0 = nn.Conv2D(128, (1, 1)) + self.bn0 = nn.BatchNorm(3) + self.bn1 = nn.BatchNorm(1) + self.slice = slice + + def hybrid_forward(self, F, x): + x_in = self.conv0(x) + x_slice = x_in.slice(begin=tuple(self.slice[0][0]), end=tuple(self.slice[0][1])) + y = self.bn0(x_slice) + y_slice = y.slice(begin=tuple(self.slice[1][0]), end=tuple(self.slice[1][1])) + out = self.bn1(y_slice) + return out + + x = mx.nd.random.uniform(shape=(16, 128, 256, 256)) + slice = [[[0, 64, 50, 0], [8, 128, 200, 256]], [[4, 50, 0, 128], [7, -1, -1, -1]]] + net = Net(slice) + check_layer_forward_withinput(net, x) + + +@with_seed() +def test_reshape_batchnorm_reshape_batchnorm(): + class Net(gluon.HybridBlock): + def __init__(self, shape, **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + self.conv0 = nn.Conv2D(128, (1, 1)) + self.bn0 = nn.BatchNorm(0) + self.bn1 = nn.BatchNorm(2) + self.reshape = shape + + def hybrid_forward(self, F, x): + x_in = self.conv0(x) + x_reshape = x_in.reshape(self.reshape[0]) + y = self.bn0(x_reshape) + y_reshape = y.reshape(self.reshape[1]) + out = self.bn1(y_reshape) + return out + + x = mx.nd.random.uniform(shape=(16, 128, 256, 512)) + shape = [(8, 256, 128, -1), (32, 128, 512, -1)] + net = Net(shape) + check_layer_forward_withinput(net, x) + + +@with_seed() +def test_slice_batchnorm_reshape_batchnorm(): + class Net(gluon.HybridBlock): + def __init__(self, shape, slice, **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + self.conv0 = nn.Conv2D(128, (1, 1)) + self.bn0 = nn.BatchNorm(0) + self.bn1 = nn.BatchNorm(2) + self.reshape = shape + self.slice = slice + + def hybrid_forward(self, F, x): + x_in = self.conv0(x) + x_slice = x_in.slice(begin=tuple(self.slice[0]), end=tuple(self.slice[1])) + y = self.bn0(x_slice) + y_reshape = y.reshape(self.reshape) + out = self.bn1(y_reshape) + return out + + x = mx.nd.random.uniform(shape=(16, 128, 256, 256)) + slice = [[0, 64, 50, 0], [8, 128, 200, 256]] + shape = (1, 128, 256, -1) + net = Net(shape, slice) + check_layer_forward_withinput(net, x) + + +@with_seed() +def test_reshape_batchnorm_slice_batchnorm(): + class Net(gluon.HybridBlock): + def __init__(self, shape, slice, **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + self.conv0 = nn.Conv2D(128, (1, 1)) + self.bn0 = nn.BatchNorm(2) + self.bn1 = nn.BatchNorm(0) + self.reshape = shape + self.slice = slice + + def hybrid_forward(self, F, x): + x_in = self.conv0(x) + x_reshape = x_in.reshape(self.reshape) + y = self.bn0(x_reshape) + y_slice = 
y.slice(begin=tuple(self.slice[0]), end=tuple(self.slice[1])) + out = self.bn1(y_slice) + return out + + x = mx.nd.random.uniform(shape=(16, 128, 256, 256)) + slice = [[0, 0, 50, 0], [8, 1, -1, 100]] + shape = (128, 1, 256, -1) + net = Net(shape, slice) + check_layer_forward_withinput(net, x) + +@with_seed() +def test_reshape_pooling2d(): + max_pooling = nn.MaxPool2D(strides=(2, 3), padding=(1, 1)) + avg_pooling = nn.AvgPool2D(strides=(2, 2), padding=(1, 1)) + global_maxpooling = nn.GlobalMaxPool2D() + global_avgpooling = nn.GlobalAvgPool2D() + pooling_layers = [max_pooling, avg_pooling, global_maxpooling, global_avgpooling] + class Net(gluon.HybridBlock): + def __init__(self, + shape, + pooling_layer, + **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + self.reshape = shape + self.pool0 = pooling_layer + + def hybrid_forward(self, F, x): + x_reshape = x.reshape(self.reshape) + out = self.pool0(x_reshape) + return out + + x = mx.nd.random.uniform(shape=(16, 128, 256, 256)) + shape = (128, 256, 256, -1) + for i in range(len(pooling_layers)): + net = Net(shape, pooling_layers[i]) + check_layer_forward_withinput(net, x) + +@with_seed() +def test_slice_pooling2d(): + max_pooling = nn.MaxPool2D(strides=(2, 3), padding=(1, 1)) + avg_pooling = nn.AvgPool2D(strides=(2, 2), padding=(1, 1)) + global_maxpooling = nn.GlobalMaxPool2D() + global_avgpooling = nn.GlobalAvgPool2D() + pooling_layers = [max_pooling, avg_pooling, global_maxpooling, global_avgpooling] + class Net(gluon.HybridBlock): + def __init__(self, + slice, + pooling_layer, + **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + self.slice = slice + self.pool0 = pooling_layer + + def hybrid_forward(self, F, x): + x_slice = x.slice(begin=self.slice[0], end=self.slice[1]) + out = self.pool0(x_slice) + return out + + x = mx.nd.random.uniform(shape=(16, 128, 256, 256)) + slice = [(12, 0, 128, 64), (16, 16, 256, 256)] + for i in range(len(pooling_layers)): + net = Net(slice, pooling_layers[i]) + check_layer_forward_withinput(net, x) + +@with_seed() +def test_reshape_pooling2d_reshape_pooling2d(): + max_pooling = nn.MaxPool2D(strides=(2, 2), padding=(1, 1)) + avg_pooling = nn.AvgPool2D(strides=(2, 2), padding=(1, 1)) + global_maxpooling = nn.GlobalMaxPool2D() + global_avgpooling = nn.GlobalAvgPool2D() + pooling_layers = [max_pooling, avg_pooling, global_maxpooling, global_avgpooling] + class Net(gluon.HybridBlock): + def __init__(self, + shape, + pooling_layer1, + pooling_layer2, + **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + self.reshape = shape + self.pool0 = pooling_layer1 + self.pool1 = pooling_layer2 + + def hybrid_forward(self, F, x): + x_reshape = x.reshape(self.reshape[0]) + y = self.pool0(x_reshape) + y_reshape = y.reshape(self.reshape[1]) + out = self.pool1(y_reshape) + return out + + x = mx.nd.random.uniform(shape=(16, 128, 256, 256)) + shape = [(128, 256, 64, -1), (128, 256, 11, -1)] + for i in range(len(pooling_layers)): + for j in range(len(pooling_layers)): + if isinstance(pooling_layers[i], (nn.GlobalMaxPool2D, nn.GlobalAvgPool2D)): + shape[1] = (256, 128, 1, 1) + net = Net(shape, pooling_layers[i], pooling_layers[j]) + check_layer_forward_withinput(net, x) + +@with_seed() +def test_slice_pooling2d_slice_pooling2d(): + max_pooling = nn.MaxPool2D(strides=(2, 3), padding=(1, 1)) + avg_pooling = nn.AvgPool2D(strides=(2, 2), padding=(1, 1)) + global_maxpooling = nn.GlobalMaxPool2D() + global_avgpooling = nn.GlobalAvgPool2D() + pooling_layers = 
[max_pooling, avg_pooling, global_maxpooling, global_avgpooling] + class Net(gluon.HybridBlock): + def __init__(self, + slice, + pooling_layer1, + pooling_layer2, + **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + self.slice = slice + self.pool0 = pooling_layer1 + self.pool1 = pooling_layer2 + + def hybrid_forward(self, F, x): + x_slice = x.slice(begin=self.slice[0][0], end=self.slice[0][1]) + y = self.pool0(x_slice) + y_slice = y.slice(begin=self.slice[1][0], end=self.slice[1][1]) + out = self.pool1(y_slice) + return out + + x = mx.nd.random.uniform(shape=(16, 128, 256, 256)) + slice = [[(8, 0, 100, 50), (16, -1, -1, -1)], [(0, 64, 0, 50), (2, -1, -1, -1)]] + for i in range(len(pooling_layers)): + for j in range(len(pooling_layers)): + if isinstance(pooling_layers[i], (nn.GlobalMaxPool2D, nn.GlobalAvgPool2D)): + slice[1] = [(0, 64, 0, 0), (2, -1, 1, 1)] + net = Net(slice, pooling_layers[i], pooling_layers[j]) + check_layer_forward_withinput(net, x) + +@with_seed() +def test_slice_pooling2d_reshape_pooling2d(): + max_pooling = nn.MaxPool2D(strides=(2, 3), padding=(1, 1)) + avg_pooling = nn.AvgPool2D(strides=(2, 2), padding=(1, 1)) + global_maxpooling = nn.GlobalMaxPool2D() + global_avgpooling = nn.GlobalAvgPool2D() + pooling_layers = [max_pooling, avg_pooling, global_maxpooling, global_avgpooling] + class Net(gluon.HybridBlock): + def __init__(self, + shape, + slice, + pooling_layer1, + pooling_layer2, + **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + self.reshape = shape + self.slice = slice + self.pool0 = pooling_layer1 + self.pool1 = pooling_layer2 + + def hybrid_forward(self, F, x): + x_slice = x.slice(begin=self.slice[0], end=self.slice[1]) + y = self.pool0(x_slice) + y_reshape = y.reshape(self.reshape) + out = self.pool1(y_reshape) + return out + + x = mx.nd.random.uniform(shape=(16, 128, 256, 256)) + slice = [(8, 0, 100, 50), (16, 128, 256, 256)] + shape = (32, -1, 0, 0) + for i in range(len(pooling_layers)): + for j in range(len(pooling_layers)): + net = Net(shape, slice, pooling_layers[i], pooling_layers[j]) + check_layer_forward_withinput(net, x) + +@with_seed() +def test_reshape_pooling2d_slice_pooling2d(): + max_pooling = nn.MaxPool2D(strides=(2, 3), padding=(1, 1)) + avg_pooling = nn.AvgPool2D(strides=(2, 2), padding=(1, 1)) + global_maxpooling = nn.GlobalMaxPool2D() + global_avgpooling = nn.GlobalAvgPool2D() + pooling_layers = [max_pooling, avg_pooling, global_maxpooling, global_avgpooling] + class Net(gluon.HybridBlock): + def __init__(self, + shape, + slice, + pooling_layer1, + pooling_layer2, + **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + self.reshape = shape + self.slice = slice + self.pool0 = pooling_layer1 + self.pool1 = pooling_layer2 + + def hybrid_forward(self, F, x): + x_reshape = x.reshape(self.reshape) + y = self.pool0(x_reshape) + y_slice = y.slice(begin=self.slice[0], end=self.slice[1]) + out = self.pool1(y_slice) + return out + + x = mx.nd.random.uniform(shape=(16, 128, 256, 256)) + shape = (0, 512, 64, -1) + slice = [(8, 256, 10, 20), (-1, -1, -1, 70)] + for i in range(len(pooling_layers)): + for j in range(len(pooling_layers)): + if isinstance(pooling_layers[i], (nn.GlobalMaxPool2D, nn.GlobalAvgPool2D)): + slice = [(8, 256, 0, 0), (-1, -1, 1, 1)] + net = Net(shape, slice, pooling_layers[i], pooling_layers[j]) + check_layer_forward_withinput(net, x) + +@with_seed() +@unittest.skip('skippping temporarily, tracked by https://github.com/apache/incubator-mxnet/issues/11164') 
+def test_reshape_deconv(): + class Net(gluon.HybridBlock): + def __init__(self, shape, **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + self.reshape = shape + self.conv0 = nn.Conv2DTranspose(64, (3, 3)) + + def hybrid_forward(self, F, x): + x_reshape = x.reshape(self.reshape) + out = self.conv0(x_reshape) + return out + x = mx.nd.random.uniform(shape=(64, 2, 256, 256)) + shape = (8, 16, 64, -1) + net = Net(shape) + check_layer_forward_withinput(net, x) + +@with_seed() +@unittest.skip('skippping temporarily, tracked by https://github.com/apache/incubator-mxnet/issues/11164') +def test_slice_deconv(): + class Net(gluon.HybridBlock): + def __init__(self, slice, **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + self.slice = slice + self.conv0 = nn.Conv2DTranspose(64, (3, 3)) + + def hybrid_forward(self, F, x): + x_slice = x.slice(begin=self.slice[0], end=self.slice[1]) + out = self.conv0(x_slice) + return out + x = mx.nd.random.uniform(shape=(128, 32, 500, 500)) + slice = [(0, 16, 0, 0), (1, 32, 256, 256)] + net = Net(slice) + check_layer_forward_withinput(net, x) + +@with_seed() +@unittest.skip('skippping temporarily, tracked by https://github.com/apache/incubator-mxnet/issues/11164') +def test_reshape_deconv_reshape_deconv(): + class Net(gluon.HybridBlock): + def __init__(self, shape, **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + self.reshape = shape + self.conv0 = nn.Conv2DTranspose(64, (3, 3)) + self.conv1 = nn.Conv2DTranspose(128, (2, 3), strides=(2, 2)) + + def hybrid_forward(self, F, x): + x_reshape = x.reshape(self.reshape[0]) + y = self.conv0(x_reshape) + y_reshape = y.reshape(self.reshape[1]) + out = self.conv1(y_reshape) + return out + x = mx.nd.random.uniform(shape=(16, 32, 256, 512)) + shape = [(32, 0, 256, -1), (64, 32, 129, -1)] + net = Net(shape) + check_layer_forward_withinput(net, x) + +@with_seed() +@unittest.skip('skippping temporarily, tracked by https://github.com/apache/incubator-mxnet/issues/11164') +def test_slice_deconv_slice_deconv(): + class Net(gluon.HybridBlock): + def __init__(self, slice, **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + self.slice = slice + self.conv0 = nn.Conv2DTranspose(64, (3, 3)) + self.conv1 = nn.Conv2DTranspose(128, (2, 3), strides=(2, 2)) + + def hybrid_forward(self, F, x): + x_slice = x.slice(begin=self.slice[0][0], end=self.slice[0][1]) + y = self.conv0(x_slice) + y_slice = y.slice(begin=self.slice[1][0], end=self.slice[1][1]) + out = self.conv1(y_slice) + return out + x = mx.nd.random.uniform(shape=(128, 32, 500, 500)) + slice = [[(0, 16, 0, 0), (8, 32, 128, 128)], [(4, 0, 2, 0), (8, 32, 130, 128)]] + net = Net(slice) + check_layer_forward_withinput(net, x) + +@with_seed() +@unittest.skip('skippping temporarily, tracked by https://github.com/apache/incubator-mxnet/issues/11164') +def test_reshape_deconv_slice_deconv(): + class Net(gluon.HybridBlock): + def __init__(self, shape, slice, **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + self.reshape = shape + self.slice = slice + self.conv0 = nn.Conv2DTranspose(64, (3, 3)) + self.conv1 = nn.Conv2DTranspose(128, (2, 3), strides=(2, 2)) + + def hybrid_forward(self, F, x): + x_reshape = x.reshape(self.reshape) + y = self.conv0(x_reshape) + y_slice = y.slice(begin=self.slice[0], end=self.slice[1]) + out = self.conv1(y_slice) + return out + x = mx.nd.random.uniform(shape=(16, 4, 500, 500)) + shape = (32, 16, 125, -1) + slice = [(4, 32, 0, 0), (20, 64, 
64, 224)] + net = Net(shape, slice) + check_layer_forward_withinput(net, x) + +@with_seed() +@unittest.skip('skippping temporarily, tracked by https://github.com/apache/incubator-mxnet/issues/11164') +def test_slice_deconv_reshape_deconv(): + class Net(gluon.HybridBlock): + def __init__(self, shape, slice, **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + self.reshape = shape + self.slice = slice + self.conv0 = nn.Conv2DTranspose(64, (3, 3)) + self.conv1 = nn.Conv2DTranspose(128, (2, 3), strides=(2, 2)) + + def hybrid_forward(self, F, x): + x_slice = x.slice(begin=self.slice[0], end=self.slice[1]) + y = self.conv0(x_slice) + y_reshape = y.reshape(self.reshape) + out = self.conv1(y_reshape) + return out + x = mx.nd.random.uniform(shape=(16, 32, 256, 512)) + shape = (24, 16, 452, -1) + slice = [(4, 0, 0, 0), (16, 32, 224, 224)] + net = Net(shape, slice) + check_layer_forward_withinput(net, x) + +@with_seed() +def test_reshape_activation(): + class Net(gluon.HybridBlock): + def __init__(self, act, shape, **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + self.reshape = shape + self.act = nn.Activation(act) + + def hybrid_forward(self, F, x): + x_reshape = x.reshape(self.reshape) + out = self.act(x_reshape) + return out + acts = ["relu", "sigmoid", "tanh", "softrelu"] + for act in acts: + x = mx.nd.random.uniform(-1, 1, shape=(16, 32, 256, 512)) + shape = (64, 8, 128, -1) + net = Net(act, shape) + check_layer_forward_withinput(net, x) + + +@with_seed() +def test_slice_activation(): + class Net(gluon.HybridBlock): + def __init__(self, act, slice, **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + self.slice = slice + self.act = nn.Activation(act) + + def hybrid_forward(self, F, x): + x_slice = x.slice(begin=self.slice[0], end=self.slice[1]) + out = self.act(x_slice) + return out + + acts = ["relu", "sigmoid", "tanh", "softrelu"] + for act in acts: + x = mx.nd.random.uniform(-1, 1, shape=(16, 32, 256, 512)) + slice = [(8, 16, 0, 0), (16, 32, 100, 100)] + net = Net(act, slice) + check_layer_forward_withinput(net, x) + + +@with_seed() +def test_reshape_activation_reshape_activation(): + class Net(gluon.HybridBlock): + def __init__(self, act0, act1, shape, **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + self.reshape = shape + self.act0 = nn.Activation(act0) + self.act1 = nn.Activation(act1) + + def hybrid_forward(self, F, x): + x_reshape = x.reshape(self.reshape[0]) + y = self.act0(x_reshape) + y_reshape = y.reshape(self.reshape[1]) + out = self.act1(y_reshape) + return out + acts = ["relu", "sigmoid", "tanh", "softrelu"] + for idx0, act0 in enumerate(acts): + for idx1, act1 in enumerate(acts): + if idx1 == idx0: + continue + x = mx.nd.random.uniform(-1, 1, shape=(16, 32, 256, 512)) + shape = [(64, 8, 128, -1), (16, 64, 128, -1)] + net = Net(act0, act1, shape) + check_layer_forward_withinput(net, x) + + +@with_seed() +def test_slice_activation_slice_activation(): + class Net(gluon.HybridBlock): + def __init__(self, act0, act1, slice, **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + self.slice = slice + self.act0 = nn.Activation(act0) + self.act1 = nn.Activation(act1) + + def hybrid_forward(self, F, x): + x_slice = x.slice(begin=self.slice[0][0], end=self.slice[0][1]) + y = self.act0(x_slice) + y_slice = y.slice(begin=self.slice[1][0], end=self.slice[1][1]) + out = self.act1(y_slice) + return out + acts = ["relu", "sigmoid", "tanh", "softrelu"] + for idx0, act0 in 
enumerate(acts): + for idx1, act1 in enumerate(acts): + if idx1 == idx0: + continue + x = mx.nd.random.uniform(-1, 1, shape=(16, 32, 256, 512)) + slice = [[(0, 0, 100, 100), (8, 16, 256, 512)], [(2, 4, 0, 0), (8, 10, 128, 128)]] + net = Net(act0, act1, slice) + check_layer_forward_withinput(net, x) + + +@with_seed() +def test_reshape_activation_slice_activation(): + class Net(gluon.HybridBlock): + def __init__(self, act0, act1, shape, slice, **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + self.reshape = shape + self.slice = slice + self.act0 = nn.Activation(act0) + self.act1 = nn.Activation(act1) + + def hybrid_forward(self, F, x): + x_reshape = x.reshape(self.reshape) + y = self.act0(x_reshape) + y_slice = y.slice(begin=self.slice[0], end=self.slice[1]) + out = self.act1(y_slice) + return out + acts = ["relu", "sigmoid", "tanh", "softrelu"] + for idx0, act0 in enumerate(acts): + for idx1, act1 in enumerate(acts): + if idx1 == idx0: + continue + x = mx.nd.random.uniform(-1, 1, shape=(16, 32, 256, 512)) + shape = (64, 16, 128, -1) + slice = [(0, 0, 0, 100), (8, 16, 64, 228)] + net = Net(act0, act1, shape, slice) + check_layer_forward_withinput(net, x) + + +@with_seed() +def test_slice_activation_reshape_activation(): + class Net(gluon.HybridBlock): + def __init__(self, act0, act1, shape, slice, **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + self.reshape = shape + self.slice = slice + self.act0 = nn.Activation(act0) + self.act1 = nn.Activation(act1) + + def hybrid_forward(self, F, x): + x_slice = x.slice(begin=self.slice[0], end=self.slice[1]) + y = self.act0(x_slice) + y_reshape = y.reshape(self.reshape) + out = self.act1(y_reshape) + return out + acts = ["relu", "sigmoid", "tanh", "softrelu"] + for idx0, act0 in enumerate(acts): + for idx1, act1 in enumerate(acts): + if idx1 == idx0: + continue + x = mx.nd.random.uniform(-1, 1, shape=(16, 32, 256, 512)) + slice = [(0, 0, 0, 100), (8, 16, 64, 228)] + shape = (64, 16, 64, -1) + net = Net(act0, act1, shape, slice) + check_layer_forward_withinput(net, x) if __name__ == '__main__': From cf92d3f75b0533fa2cc07e9d5e8a3c56c7d2fda2 Mon Sep 17 00:00:00 2001 From: Shufan Date: Fri, 6 Jul 2018 11:16:44 +0800 Subject: [PATCH 06/12] Resolving conflict --- tests/python/unittest/test_gluon.py | 17 ++++++++++++++++- 1 file changed, 16 insertions(+), 1 deletion(-) diff --git a/tests/python/unittest/test_gluon.py b/tests/python/unittest/test_gluon.py index bc4200f3fe63..7debe5b28913 100644 --- a/tests/python/unittest/test_gluon.py +++ b/tests/python/unittest/test_gluon.py @@ -1413,6 +1413,21 @@ def hybrid_forward(self, F, in1): assert_almost_equal(out_grad.asnumpy(), d1.grad.asnumpy()) assert_almost_equal(out_grad.asnumpy(), d2.grad.asnumpy()) + +def test_grad_graph_change(): + class Model(mx.gluon.HybridBlock): + def hybrid_forward(self, F, array, index): + row = array.take(index) + return row, index + array = mx.nd.arange(3) + index = mx.nd.array([2]) + array.attach_grad() + model = Model() + model.hybridize(inline_limit=0) + with mx.autograd.record(train_mode=True): + row, _ = model(array, index) + row.backward() + def check_layer_forward_withinput(net, x): x_hybrid = x.copy() x.attach_grad() @@ -2460,7 +2475,7 @@ def hybrid_forward(self, F, x): shape = (64, 16, 64, -1) net = Net(act0, act1, shape, slice) check_layer_forward_withinput(net, x) - + if __name__ == '__main__': import nose From 7cd87fcd518d9a99604f1e914127c19be9e55aff Mon Sep 17 00:00:00 2001 From: Shufan Date: Sat, 7 Jul 2018 12:04:33 
+0800
Subject: [PATCH 07/12] Further reduce the tensor size

---
 tests/python/unittest/test_gluon.py | 52 +++++++++++++++--------------
 1 file changed, 27 insertions(+), 25 deletions(-)

diff --git a/tests/python/unittest/test_gluon.py b/tests/python/unittest/test_gluon.py
index 7debe5b28913..3a3d9784cfe9 100644
--- a/tests/python/unittest/test_gluon.py
+++ b/tests/python/unittest/test_gluon.py
@@ -1448,7 +1448,7 @@ def test_conv2d_16c():
     chn_list = [16, 256]
     kernel_list = [1, 3]
     #kernel_list.append(224)
-    batch_size = 32
+    batch_size = 16
     class Net(gluon.HybridBlock):
         def __init__(self,
                      chn_num,
@@ -1474,7 +1474,7 @@ def test_group_conv2d_16c():
     grp_list = [16]
     input_size_list = np.random.randint(low=3, high=65, size=10).tolist()
     kernel_list = [1, 3]
-    batch_size = 32
+    batch_size = 16
     class Net(gluon.HybridBlock):
         def __init__(self,
                      chn_num,
@@ -1557,13 +1557,13 @@ def hybrid_forward(self, F, x):
 
 @with_seed()
 def test_concat():
-    chn_list = [16, 1024]
-    shapes = [64, 32, 3]
+    chn_list = [64, 16]
+    shapes = [7, 5, 3]
     input_num = np.random.randint(low=2, high=11)
     shape_list = []
     for i in range(len(shapes)):
         shape_list.append((shapes[i], shapes[i]))
-    batch_size = 32
+    batch_size = 16
     class Net(gluon.HybridBlock):
         def __init__(self,
                      check_dim,
@@ -1581,12 +1581,14 @@ def __init__(self,
         def hybrid_forward(self, F, x):
             return self.concat(x)
 
-    for i in range(len(chn_list)):
-        shape = (batch_size,) + (3,) + shape_list[i]
+    for s in range(len(shape_list)):
+        shape = (batch_size,) + (3,) + shape_list[s]
         x = mx.nd.random.uniform(-1.0, 1.0, shape=shape)
-        for axis in range(4):
-            net = Net(axis, input_num, chn_list[i], 1)
-            check_layer_forward_withinput(net, x)
+        for i in range(len(chn_list)):
+            for axis in range(4):
+                net = Net(axis, input_num, chn_list[i], 1)
+                check_layer_forward_withinput(net, x)
+
 
 @with_seed()
 def test_reshape_conv():
@@ -1713,7 +1715,7 @@ class Net(gluon.HybridBlock):
         def __init__(self, **kwargs):
             super(Net, self).__init__(**kwargs)
             with self.name_scope():
-                channel0 = np.random.randint(1, 65)
+                channel0 = np.random.randint(1, 17)
                 self.dense0 = nn.Dense(channel0)
 
         def hybrid_forward(self, F, x):
@@ -1721,7 +1723,7 @@ def hybrid_forward(self, F, x):
             out = self.dense0(x_reshape)
             return out
 
-    x = mx.nd.random.uniform(shape=(16, 128, 64, 64))
+    x = mx.nd.random.uniform(shape=(16, 32, 64, 64))
     net = Net()
     check_layer_forward_withinput(net, x)
@@ -1732,7 +1734,7 @@ class Net(gluon.HybridBlock):
         def __init__(self, slice, **kwargs):
             super(Net, self).__init__(**kwargs)
             with self.name_scope():
-                channel0 = np.random.randint(1, 65)
+                channel0 = np.random.randint(1, 17)
                 self.dense0 = nn.Dense(channel0)
                 self.slice = slice
@@ -1742,8 +1744,8 @@ def hybrid_forward(self, F, x):
             out = self.dense0(x_slice)
             return out
 
-    x = mx.nd.random.uniform(shape=(16, 128, 64, 64))
-    slice = [[0, 64, 50, 0], [8, 128, 64, 64]]
+    x = mx.nd.random.uniform(shape=(16, 32, 64, 64))
+    slice = [[0, 16, 50, 0], [8, 32, 64, 64]]
     net = Net(slice)
     check_layer_forward_withinput(net, x)
@@ -1754,7 +1756,7 @@ def __init__(self, slice, **kwargs):
             super(Net, self).__init__(**kwargs)
             with self.name_scope():
                 channel0 = 50
-                channel1 = np.random.randint(1, 65)
+                channel1 = np.random.randint(1, 33)
                 self.dense0 = nn.Dense(channel0)
                 self.dense1 = nn.Dense(channel1)
                 self.slice = slice
@@ -1766,8 +1768,8 @@ def hybrid_forward(self, F, x):
             out = self.dense1(y_slice)
             return out
 
-    x = mx.nd.random.uniform(shape=(16, 128, 64, 64))
-    slice = [[0, 64, 50, 0], [8, 128, 64, 64]]
+    x = mx.nd.random.uniform(shape=(16, 32, 64, 64))
+    slice = [[0, 16, 50, 0], [8, 32, 64, 64]]
     net = Net(slice)
check_layer_forward_withinput(net, x) @@ -1777,7 +1779,7 @@ class Net(gluon.HybridBlock): def __init__(self, **kwargs): super(Net, self).__init__(**kwargs) with self.name_scope(): - channel0 = np.random.randint(1, 65) + channel0 = np.random.randint(1, 17) channel1 = np.random.randint(1, 65) self.dense0 = nn.Dense(channel0) self.dense1 = nn.Dense(channel1) @@ -1789,7 +1791,7 @@ def hybrid_forward(self, F, x): out = self.dense1(y_reshape) return out - x = mx.nd.random.uniform(shape=(16, 128, 64, 64)) + x = mx.nd.random.uniform(shape=(16, 32, 64, 64)) net = Net() check_layer_forward_withinput(net, x) @@ -1800,8 +1802,8 @@ class Net(gluon.HybridBlock): def __init__(self, slice, **kwargs): super(Net, self).__init__(**kwargs) with self.name_scope(): - channel0 = np.random.randint(1, 65) - channel1 = np.random.randint(1, 65) + channel0 = np.random.randint(1, 17) + channel1 = np.random.randint(1, 17) self.dense0 = nn.Dense(channel0) self.dense1 = nn.Dense(channel1) self.slice = slice @@ -1813,8 +1815,8 @@ def hybrid_forward(self, F, x): out = self.dense1(y_reshape) return out - x = mx.nd.random.uniform(shape=(16, 128, 64, 64)) - slice = [[0, 64, 50, 0], [8, 128, 64, 64]] + x = mx.nd.random.uniform(shape=(16, 32, 64, 64)) + slice = [[0, 16, 50, 0], [8, 32, 64, 64]] net = Net(slice) check_layer_forward_withinput(net, x) @@ -1827,7 +1829,7 @@ def __init__(self, **kwargs): super(Net, self).__init__(**kwargs) with self.name_scope(): channel0 = 64 - channel1 = np.random.randint(1, 65) + channel1 = np.random.randint(1, 17) self.dense0 = nn.Dense(channel0) self.dense1 = nn.Dense(channel1) @@ -1838,7 +1840,7 @@ def hybrid_forward(self, F, x): out = self.dense1(y_slice) return out - x = mx.nd.random.uniform(shape=(16, 128, 64, 64)) + x = mx.nd.random.uniform(shape=(16, 32, 64, 64)) net = Net() check_layer_forward_withinput(net, x) From e920d5aefdd134f3d7bc37782de87cc38464156a Mon Sep 17 00:00:00 2001 From: Shufan Date: Sun, 8 Jul 2018 12:49:05 +0800 Subject: [PATCH 08/12] minor changes --- tests/python/unittest/test_gluon.py | 36 ++++++++++++++--------------- 1 file changed, 18 insertions(+), 18 deletions(-) diff --git a/tests/python/unittest/test_gluon.py b/tests/python/unittest/test_gluon.py index 3a3d9784cfe9..a4613a6205d0 100644 --- a/tests/python/unittest/test_gluon.py +++ b/tests/python/unittest/test_gluon.py @@ -1448,7 +1448,7 @@ def test_conv2d_16c(): chn_list = [16, 256] kernel_list = [1, 3] #kernel_list.append(224) - batch_size = 16 + batch_size = 8 class Net(gluon.HybridBlock): def __init__(self, chn_num, @@ -1474,7 +1474,7 @@ def test_group_conv2d_16c(): grp_list = [16] input_size_list = np.random.randint(low=3, high=65, size=10).tolist() kernel_list = [1, 3] - batch_size = 16 + batch_size = 8 class Net(gluon.HybridBlock): def __init__(self, chn_num, @@ -1505,7 +1505,7 @@ def test_deconv2d_16c(): out_chn_list = [512, 256, 128, 64, 32, 16, 3] kernel_list = [1, 3, 5, 7] in_shape = [4, 8, 16, 32, 64, 224] - batch_size = 32 + batch_size = 8 class Net(gluon.HybridBlock): def __init__(self, chn_num, kernel, **kwargs): super(Net, self).__init__(**kwargs) @@ -1530,7 +1530,7 @@ def test_batchnorm_16c(): shape_list = [] for i in range(len(shape)): shape_list.append((shape[i], shape[i])) - batch_size = 32 + batch_size = 8 class Net(gluon.HybridBlock): def __init__(self, chn_num, @@ -1563,7 +1563,7 @@ def test_concat(): shape_list = [] for i in range(len(shapes)): shape_list.append((shapes[i], shapes[i])) - batch_size = 16 + batch_size = 8 class Net(gluon.HybridBlock): def __init__(self, check_dim, @@ 
-1634,13 +1634,13 @@ class Net(gluon.HybridBlock): def __init__(self, **kwargs): super(Net, self).__init__(**kwargs) with self.name_scope(): - self.conv0 = nn.Conv2D(64, (3, 3)) + self.conv0 = nn.Conv2D(16, (3, 3)) def hybrid_forward(self, F, x): - x_slice = x.slice(begin=(0, 2, 0, 0), end=(32, 5, 224, 224)) + x_slice = x.slice(begin=(0, 2, 0, 0), end=(4, 5, 32, 32)) out = self.conv0(x_slice) return out - x = mx.nd.random.uniform(shape=(32, 6, 224, 224)) + x = mx.nd.random.uniform(shape=(8, 6, 32, 32)) net = Net() check_layer_forward_withinput(net, x) @@ -1651,16 +1651,16 @@ class Net(gluon.HybridBlock): def __init__(self, **kwargs): super(Net, self).__init__(**kwargs) with self.name_scope(): - self.conv0 = nn.Conv2D(64, (3, 3)) - self.conv1 = nn.Conv2D(256, (3, 3)) + self.conv0 = nn.Conv2D(16, (1, 1)) + self.conv1 = nn.Conv2D(16, (1, 1)) def hybrid_forward(self, F, x): - x_slice = x.slice(begin=(0, 2, 0, 0), end=(32, 5, 224, 224)) + x_slice = x.slice(begin=(0, 0, 0, 0), end=(4, 3, 16, 16)) y = self.conv0(x_slice) - y_slice = y.slice(begin=(0, 32, 0, 0), end=(32, 64, 222, 222)) + y_slice = y.slice(begin=(0, 1, 0, 0), end=(4, 4, 16, 16)) out = self.conv1(y_slice) return out - x = mx.nd.random.uniform(shape=(32, 6, 224, 224)) + x = mx.nd.random.uniform(shape=(8, 6, 32, 32)) net = Net() check_layer_forward_withinput(net, x) @@ -1695,16 +1695,16 @@ class Net(gluon.HybridBlock): def __init__(self, **kwargs): super(Net, self).__init__(**kwargs) with self.name_scope(): - self.conv0 = nn.Conv2D(64, (3, 3)) - self.conv1 = nn.Conv2D(256, (3, 3)) + self.conv0 = nn.Conv2D(16, (1, 1)) + self.conv1 = nn.Conv2D(16, (1, 1)) def hybrid_forward(self, F, x): - x_reshape = x.reshape((0, 0, 448, 112)) + x_reshape = x.reshape((0, 0, 64, 16)) y = self.conv0(x_reshape) - y_slice = y.slice(begin=(0, 32, 0, 0), end=(32, 64, 446, 110)) + y_slice = y.slice(begin=(0, 0, 0, 0), end=(4, 3, 16, 16)) out = self.conv1(y_slice) return out - x = mx.nd.random.uniform(shape=(32, 6, 224, 224)) + x = mx.nd.random.uniform(shape=(8, 3, 32, 32)) net = Net() check_layer_forward_withinput(net, x) From 4c2ee4f334954127d879e1a818a94be3dd9c678f Mon Sep 17 00:00:00 2001 From: Shufan Date: Wed, 11 Jul 2018 12:46:48 +0800 Subject: [PATCH 09/12] move to mkl --- tests/python/mkl/test_mkldnn.py | 1 + tests/python/mkl/test_mkldnn_gluon.py | 1085 +++++++++++++++++++++++++ tests/python/unittest/test_gluon.py | 1051 ------------------------ 3 files changed, 1086 insertions(+), 1051 deletions(-) create mode 100644 tests/python/mkl/test_mkldnn_gluon.py diff --git a/tests/python/mkl/test_mkldnn.py b/tests/python/mkl/test_mkldnn.py index 8c296deef20a..7f4a4d062d7a 100644 --- a/tests/python/mkl/test_mkldnn.py +++ b/tests/python/mkl/test_mkldnn.py @@ -22,6 +22,7 @@ import os import numpy as np import mxnet as mx +from test_mkldnn_gluon import * from mxnet.test_utils import assert_almost_equal from mxnet import gluon from mxnet.gluon import nn diff --git a/tests/python/mkl/test_mkldnn_gluon.py b/tests/python/mkl/test_mkldnn_gluon.py new file mode 100644 index 000000000000..e606106ae5ce --- /dev/null +++ b/tests/python/mkl/test_mkldnn_gluon.py @@ -0,0 +1,1085 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+"""
+MKL-DNN related test cases
+"""
+import sys
+import os
+import numpy as np
+import mxnet as mx
+import unittest
+from mxnet.test_utils import assert_almost_equal
+from mxnet import gluon
+from mxnet.gluon import nn
+from mxnet.test_utils import *
+curr_path = os.path.dirname(os.path.abspath(os.path.expanduser(__file__)))
+sys.path.append(os.path.join(curr_path, '../unittest/'))
+from common import with_seed
+
+def check_layer_forward_withinput(net, x):
+    x_hybrid = x.copy()
+    x.attach_grad()
+    x_hybrid.attach_grad()
+    net.collect_params().initialize()
+    with mx.autograd.record():
+        out1 = net(x)
+    out1.backward()
+    net.hybridize()
+    with mx.autograd.record():
+        out2 = net(x_hybrid)
+    out2.backward()
+    mx.test_utils.assert_almost_equal(x.grad.asnumpy(), x_hybrid.grad.asnumpy(), rtol=1e-5, atol=1e-6)
+    mx.test_utils.assert_almost_equal(out1.asnumpy(), out2.asnumpy(), rtol=1e-5, atol=1e-6)
+
+@with_seed()
+def test_conv2d_16c():
+    chn_list = [16, 256]
+    kernel_list = [1, 3]
+    #kernel_list.append(224)
+    batch_size = 8
+    class Net(gluon.HybridBlock):
+        def __init__(self,
+                     chn_num,
+                     kernel,
+                     **kwargs):
+            super(Net, self).__init__(**kwargs)
+            with self.name_scope():
+                self.conv0 = gluon.nn.Conv2D(chn_num, (kernel, kernel))
+
+        def hybrid_forward(self, F, x):
+            out = self.conv0(x)
+            return out
+
+    x = mx.nd.random.uniform(-1.0, 1.0, shape=(batch_size, 3, 224, 224))
+    for i in range(len(chn_list)):
+        for j in range(len(kernel_list)):
+            net = Net(chn_list[i], kernel_list[j])
+            check_layer_forward_withinput(net, x)
+
+@with_seed()
+def test_group_conv2d_16c():
+    grp_list = [16]
+    input_size_list = np.random.randint(low=3, high=65, size=10).tolist()
+    kernel_list = [1, 3]
+    batch_size = 8
+    class Net(gluon.HybridBlock):
+        def __init__(self,
+                     chn_num,
+                     kernel,
+                     **kwargs):
+            super(Net, self).__init__(**kwargs)
+            with self.name_scope():
+                self.conv0 = gluon.nn.Conv2D(chn_num, (1, 1))
+                self.conv1 = gluon.nn.Conv2D(chn_num, (kernel, kernel), groups=chn_num)
+
+        def hybrid_forward(self, F, x):
+            y = self.conv0(x)
+            out = self.conv1(y)
+            return out
+
+    for i in range(len(input_size_list)):
+        x = mx.nd.random.uniform(-1.0, 1.0, shape=(batch_size, 3, input_size_list[i], input_size_list[i]))
+        for j in range(len(grp_list)):
+            for k in range(len(kernel_list)):
+                net = Net(grp_list[j], kernel_list[k])
+                check_layer_forward_withinput(net, x)
+
+
+@with_seed()
+@unittest.skip('skipping temporarily, tracked by https://github.com/apache/incubator-mxnet/issues/11164')
+def test_deconv2d_16c():
+    in_chn_list = [1024, 512, 256, 128, 64, 32, 16]
+    out_chn_list = [512, 256, 128, 64, 32, 16, 3]
+    kernel_list = [1, 3, 5, 7]
+    in_shape = [4, 8, 16, 32, 64, 224]
+    batch_size = 8
+    class Net(gluon.HybridBlock):
+        def __init__(self, chn_num, kernel, **kwargs):
+            super(Net, self).__init__(**kwargs)
+            with self.name_scope():
+                self.deconv0 = gluon.nn.Conv2DTranspose(chn_num, (kernel, kernel))
+
+        def hybrid_forward(self, F, x):
+            out = self.deconv0(x)
+            return out
+    for i in range(len(in_shape)):
+        x = mx.nd.random.uniform(-1.0, 1.0, shape=(batch_size, in_chn_list[i], in_shape[i], in_shape[i]))
+        for j in range(len(kernel_list)):
+            net = Net(out_chn_list[i], kernel_list[j])
+            check_layer_forward_withinput(net, x)
+
+
+@with_seed()
+@unittest.skip('skipping temporarily, tracked by https://github.com/apache/incubator-mxnet/issues/11164')
+def test_batchnorm_16c():
+    chn_list = [16, 1024]
+    shape = np.random.randint(low=1, high=300, size=10)
+    shape_list = []
+    for i in range(len(shape)):
+        shape_list.append((shape[i], shape[i]))
+    batch_size = 8
+    class Net(gluon.HybridBlock):
+        def __init__(self,
+                     chn_num,
+                     kernel,
+                     axis,
+                     **kwargs):
+            super(Net, self).__init__(**kwargs)
+            with self.name_scope():
+                self.conv0 = gluon.nn.Conv2D(chn_num, (kernel, kernel))
+                self.bn0 = gluon.nn.BatchNorm(axis=axis)
+
+        def hybrid_forward(self, F, x):
+            conv = self.conv0(x)
+            out = self.bn0(conv)
+            return out
+
+    for i in range(len(chn_list)):
+        for j in range(len(shape_list)):
+            shape = (batch_size, ) + (3,) + shape_list[j]
+            x = mx.nd.random.uniform(-1.0, 1.0, shape=shape)
+            net = Net(chn_list[i], 1, 1)
+            check_layer_forward_withinput(net, x)
+
+
+@with_seed()
+def test_concat():
+    chn_list = [64, 16]
+    shapes = [7, 5, 3]
+    input_num = np.random.randint(low=2, high=11)
+    shape_list = []
+    for i in range(len(shapes)):
+        shape_list.append((shapes[i], shapes[i]))
+    batch_size = 8
+    class Net(gluon.HybridBlock):
+        def __init__(self,
+                     check_dim,
+                     input_num,
+                     chn_num,
+                     kernel,
+                     **kwargs):
+            super(Net, self).__init__(**kwargs)
+            with self.name_scope():
+                from mxnet.gluon.contrib.nn import HybridConcurrent
+                self.concat = HybridConcurrent(axis=check_dim)
+                for i in range(input_num):
+                    self.concat.add(gluon.nn.Conv2D(chn_num, (kernel, kernel)))
+
+        def hybrid_forward(self, F, x):
+            return self.concat(x)
+
+    for s in range(len(shape_list)):
+        # index with the loop variable `s`; using the stale `i` left over from
+        # the list-building loop above always picked the last shape
+        shape = (batch_size,) + (3,) + shape_list[s]
+        x = mx.nd.random.uniform(-1.0, 1.0, shape=shape)
+        for i in range(len(chn_list)):
+            for axis in range(4):
+                net = Net(axis, input_num, chn_list[i], 1)
+                check_layer_forward_withinput(net, x)
+
+
+@with_seed()
+def test_reshape_conv():
+    class Net(gluon.HybridBlock):
+        def __init__(self, **kwargs):
+            super(Net, self).__init__(**kwargs)
+            with self.name_scope():
+                self.conv0 = nn.Conv2D(64, (3, 3))
+
+        def hybrid_forward(self, F, x):
+            x_reshape = x.reshape((0, 0, 448, 112))
+            out = self.conv0(x_reshape)
+            return out
+    x = mx.nd.random.uniform(shape=(32, 3, 224, 224))
+    net = Net()
+    check_layer_forward_withinput(net, x)
+
+
+@with_seed()
+@unittest.skip('skipping temporarily, tracked by https://github.com/apache/incubator-mxnet/issues/11164')
+def test_reshape_conv_reshape_conv():
+    class Net(gluon.HybridBlock):
+        def __init__(self, **kwargs):
+            super(Net, self).__init__(**kwargs)
+            with self.name_scope():
+                self.conv0 = nn.Conv2D(64, (3, 3))
+                self.conv1 = nn.Conv2D(256, (3, 3))
+
+        def hybrid_forward(self, F, x):
+            x_reshape = x.reshape((0, 0, 448, 112))
+            y = self.conv0(x_reshape)
+            y_reshape = y.reshape((0, 0, 223, 220))
+            out = self.conv1(y_reshape)
+            return out
+    x = mx.nd.random.uniform(shape=(32, 3, 224, 224))
+    net = Net()
+    check_layer_forward_withinput(net, x)
+
+
+@with_seed()
+def test_slice_conv():
+    class Net(gluon.HybridBlock):
+        def __init__(self, **kwargs):
+            super(Net, self).__init__(**kwargs)
+            with self.name_scope():
+                self.conv0 = nn.Conv2D(16, (3, 3))
+
+        def hybrid_forward(self, F, x):
+            x_slice = x.slice(begin=(0, 2, 0, 0), end=(4, 5, 32, 32))
+            out = self.conv0(x_slice)
+            return out
+    x = mx.nd.random.uniform(shape=(8, 6, 32, 32))
+    net = Net()
+    check_layer_forward_withinput(net, x)
+
+
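+# A minimal sketch (illustrative only, not an extra test case) of what each
+# slice/conv check in this file exercises: slicing an NDArray before a Conv2D
+# makes the MKL-DNN backend handle a non-default input layout on both the
+# imperative and the hybridized path. Assuming the shapes used above:
+#
+#   net = gluon.nn.Conv2D(16, (3, 3))
+#   net.collect_params().initialize()
+#   x = mx.nd.random.uniform(shape=(8, 6, 32, 32))
+#   y = net(x.slice(begin=(0, 2, 0, 0), end=(4, 5, 32, 32)))  # -> (4, 16, 30, 30)
+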
+@with_seed()
+def test_slice_conv_slice_conv():
+    class Net(gluon.HybridBlock):
+        def __init__(self, **kwargs):
+            super(Net, self).__init__(**kwargs)
+            with self.name_scope():
+                self.conv0 = nn.Conv2D(16, (1, 1))
+                self.conv1 = nn.Conv2D(16, (1, 1))
+
+        def hybrid_forward(self, F, x):
+            x_slice = x.slice(begin=(0, 0, 0, 0), end=(4, 3, 16, 16))
+            y = self.conv0(x_slice)
+            y_slice = y.slice(begin=(0, 1, 0, 0), end=(4, 4, 16, 16))
+            out = self.conv1(y_slice)
+            return out
+    x = mx.nd.random.uniform(shape=(8, 6, 32, 32))
+    net = Net()
+    check_layer_forward_withinput(net, x)
+
+
+@with_seed()
+@unittest.skip('skipping temporarily, tracked by https://github.com/apache/incubator-mxnet/issues/11164')
+def test_slice_conv_reshape_conv():
+    class Net(gluon.HybridBlock):
+        def __init__(self, **kwargs):
+            super(Net, self).__init__(**kwargs)
+            with self.name_scope():
+                self.conv0 = nn.Conv2D(64, (3, 3))
+                self.conv1 = nn.Conv2D(256, (3, 3))
+
+        def hybrid_forward(self, F, x):
+            x_slice = x.slice(begin=(0, 0, 1, 1), end=(32, 3, 225, 225))
+            y = self.conv0(x_slice)
+            y_reshape = y.reshape((0, 0, 444, 111))
+            out = self.conv1(y_reshape)
+            return out
+
+    x = mx.nd.random.uniform(shape=(32, 3, 299, 299))
+    net = Net()
+    check_layer_forward_withinput(net, x)
+
+@with_seed()
+def test_reshape_conv_slice_conv():
+    """
+    Test Gluon Conv2D computation when the input is transformed by
+    ndarray reshape and slice.
+    """
+    class Net(gluon.HybridBlock):
+        def __init__(self, **kwargs):
+            super(Net, self).__init__(**kwargs)
+            with self.name_scope():
+                self.conv0 = nn.Conv2D(16, (1, 1))
+                self.conv1 = nn.Conv2D(16, (1, 1))
+
+        def hybrid_forward(self, F, x):
+            x_reshape = x.reshape((0, 0, 64, 16))
+            y = self.conv0(x_reshape)
+            y_slice = y.slice(begin=(0, 0, 0, 0), end=(4, 3, 16, 16))
+            out = self.conv1(y_slice)
+            return out
+    x = mx.nd.random.uniform(shape=(8, 3, 32, 32))
+    net = Net()
+    check_layer_forward_withinput(net, x)
+
+
+@with_seed()
+def test_reshape_dense():
+    class Net(gluon.HybridBlock):
+        def __init__(self, **kwargs):
+            super(Net, self).__init__(**kwargs)
+            with self.name_scope():
+                channel0 = np.random.randint(1, 17)
+                self.dense0 = nn.Dense(channel0)
+
+        def hybrid_forward(self, F, x):
+            x_reshape = x.reshape((8, 64, 128, -1))
+            out = self.dense0(x_reshape)
+            return out
+
+    x = mx.nd.random.uniform(shape=(16, 32, 64, 64))
+    net = Net()
+    check_layer_forward_withinput(net, x)
+
+
+@with_seed()
+def test_slice_dense():
+    class Net(gluon.HybridBlock):
+        def __init__(self, slice, **kwargs):
+            super(Net, self).__init__(**kwargs)
+            with self.name_scope():
+                channel0 = np.random.randint(1, 17)
+                self.dense0 = nn.Dense(channel0)
+                self.slice = slice
+
+        def hybrid_forward(self, F, x):
+            x_slice = x.slice(begin=tuple(self.slice[0]),
+                              end=tuple(self.slice[1]))
+            out = self.dense0(x_slice)
+            return out
+
+    x = mx.nd.random.uniform(shape=(16, 32, 64, 64))
+    slice = [[0, 16, 50, 0], [8, 32, 64, 64]]
+    net = Net(slice)
+    check_layer_forward_withinput(net, x)
+
+@with_seed()
+def test_slice_dense_slice_dense():
+    class Net(gluon.HybridBlock):
+        def __init__(self, slice, **kwargs):
+            super(Net, self).__init__(**kwargs)
+            with self.name_scope():
+                channel0 = 50
+                channel1 = np.random.randint(1, 33)
+                self.dense0 = nn.Dense(channel0)
+                self.dense1 = nn.Dense(channel1)
+                self.slice = slice
+
+        def hybrid_forward(self, F, x):
+            x_slice = x.slice(begin=tuple(self.slice[0]), end=tuple(self.slice[1]))
+            y = self.dense0(x_slice)
+            y_slice = y.slice(begin=(4, 0), end=(-1, 10))
+            out = self.dense1(y_slice)
+            return out
+
+    x = mx.nd.random.uniform(shape=(16, 32, 64, 64))
+    slice = [[0, 16, 50, 0], [8, 32, 64, 64]]
+    net = Net(slice)
+    check_layer_forward_withinput(net, x)
+
+@with_seed()
+def test_reshape_dense_reshape_dense():
+    class Net(gluon.HybridBlock):
+        def __init__(self, **kwargs):
+            super(Net, self).__init__(**kwargs)
+            with self.name_scope():
+                channel0 = np.random.randint(1, 17)
+                channel1 = np.random.randint(1, 65)
+                self.dense0 = nn.Dense(channel0)
+                self.dense1 = nn.Dense(channel1)
+
+        def hybrid_forward(self, F, x):
+            x_reshape = x.reshape((8, 64, 128, -1))
+            y = self.dense0(x_reshape)
+            y_reshape = y.reshape((1, -1))
+            out = self.dense1(y_reshape)
+            return out
+
+    x = mx.nd.random.uniform(shape=(16, 32, 64, 64))
+    net = Net()
+    check_layer_forward_withinput(net, x)
+
+
+@with_seed()
+def test_slice_dense_reshape_dense():
+    class Net(gluon.HybridBlock):
+        def __init__(self, slice, **kwargs):
+            super(Net, self).__init__(**kwargs)
+            with self.name_scope():
+                channel0 = np.random.randint(1, 17)
+                channel1 = np.random.randint(1, 17)
+                self.dense0 = nn.Dense(channel0)
+                self.dense1 = nn.Dense(channel1)
+                self.slice = slice
+
+        def hybrid_forward(self, F, x):
+            x_slice = x.slice(begin=tuple(self.slice[0]), end=tuple(self.slice[1]))
+            y = self.dense0(x_slice)
+            y_reshape = y.reshape((1, -1))
+            out = self.dense1(y_reshape)
+            return out
+
+    x = mx.nd.random.uniform(shape=(16, 32, 64, 64))
+    slice = [[0, 16, 50, 0], [8, 32, 64, 64]]
+    net = Net(slice)
+    check_layer_forward_withinput(net, x)
+
+
+@with_seed()
+def test_reshape_dense_slice_dense():
+    class Net(gluon.HybridBlock):
+        def __init__(self, **kwargs):
+            super(Net, self).__init__(**kwargs)
+            with self.name_scope():
+                channel0 = 64
+                channel1 = np.random.randint(1, 17)
+                self.dense0 = nn.Dense(channel0)
+                self.dense1 = nn.Dense(channel1)
+
+        def hybrid_forward(self, F, x):
+            x_reshape = x.reshape((8, 64, 128, -1))
+            y = self.dense0(x_reshape)
+            y_slice = y.slice(begin=(0, 32), end=(8, 64))
+            out = self.dense1(y_slice)
+            return out
+
+    x = mx.nd.random.uniform(shape=(16, 32, 64, 64))
+    net = Net()
+    check_layer_forward_withinput(net, x)
+
+
+@with_seed()
+@unittest.skip('skipping temporarily, tracked by https://github.com/apache/incubator-mxnet/issues/11164')
+def test_reshape_batchnorm():
+    class Net(gluon.HybridBlock):
+        def __init__(self, shape, **kwargs):
+            super(Net, self).__init__(**kwargs)
+            with self.name_scope():
+                self.conv0 = nn.Conv2D(128, (1, 1))
+                self.bn0 = nn.BatchNorm()
+                self.reshape = shape
+
+        def hybrid_forward(self, F, x):
+            x_in = self.conv0(x)
+            x_reshape = x_in.reshape(self.reshape)
+            out = self.bn0(x_reshape)
+            return out
+
+    x = mx.nd.random.uniform(shape=(16, 128, 256, 256))
+    shape = (32, 512, 128, -1)
+    net = Net(shape)
+    check_layer_forward_withinput(net, x)
+
+
+@with_seed()
+def test_slice_batchnorm():
+    class Net(gluon.HybridBlock):
+        def __init__(self, slice, **kwargs):
+            super(Net, self).__init__(**kwargs)
+            with self.name_scope():
+                self.conv0 = nn.Conv2D(128, (1, 1))
+                self.bn0 = nn.BatchNorm(3)
+                self.slice = slice
+
+        def hybrid_forward(self, F, x):
+            x_in = self.conv0(x)
+            x_slice = x_in.slice(begin=tuple(self.slice[0]),
+                                 end=tuple(self.slice[1]))
+            out = self.bn0(x_slice)
+            return out
+
+    x = mx.nd.random.uniform(shape=(16, 128, 256, 256))
+    slice = [[0, 64, 50, 0], [8, 128, 256, 256]]
+    net = Net(slice)
+    check_layer_forward_withinput(net, x)
+
+
+@with_seed()
+def test_slice_batchnorm_slice_batchnorm():
+    class Net(gluon.HybridBlock):
+        def __init__(self, slice, **kwargs):
+            super(Net, 
self).__init__(**kwargs) + with self.name_scope(): + self.conv0 = nn.Conv2D(128, (1, 1)) + self.bn0 = nn.BatchNorm(3) + self.bn1 = nn.BatchNorm(1) + self.slice = slice + + def hybrid_forward(self, F, x): + x_in = self.conv0(x) + x_slice = x_in.slice(begin=tuple(self.slice[0][0]), end=tuple(self.slice[0][1])) + y = self.bn0(x_slice) + y_slice = y.slice(begin=tuple(self.slice[1][0]), end=tuple(self.slice[1][1])) + out = self.bn1(y_slice) + return out + + x = mx.nd.random.uniform(shape=(16, 128, 256, 256)) + slice = [[[0, 64, 50, 0], [8, 128, 200, 256]], [[4, 50, 0, 128], [7, -1, -1, -1]]] + net = Net(slice) + check_layer_forward_withinput(net, x) + + +@with_seed() +def test_reshape_batchnorm_reshape_batchnorm(): + class Net(gluon.HybridBlock): + def __init__(self, shape, **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + self.conv0 = nn.Conv2D(128, (1, 1)) + self.bn0 = nn.BatchNorm(0) + self.bn1 = nn.BatchNorm(2) + self.reshape = shape + + def hybrid_forward(self, F, x): + x_in = self.conv0(x) + x_reshape = x_in.reshape(self.reshape[0]) + y = self.bn0(x_reshape) + y_reshape = y.reshape(self.reshape[1]) + out = self.bn1(y_reshape) + return out + + x = mx.nd.random.uniform(shape=(16, 128, 256, 512)) + shape = [(8, 256, 128, -1), (32, 128, 512, -1)] + net = Net(shape) + check_layer_forward_withinput(net, x) + + +@with_seed() +def test_slice_batchnorm_reshape_batchnorm(): + class Net(gluon.HybridBlock): + def __init__(self, shape, slice, **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + self.conv0 = nn.Conv2D(128, (1, 1)) + self.bn0 = nn.BatchNorm(0) + self.bn1 = nn.BatchNorm(2) + self.reshape = shape + self.slice = slice + + def hybrid_forward(self, F, x): + x_in = self.conv0(x) + x_slice = x_in.slice(begin=tuple(self.slice[0]), end=tuple(self.slice[1])) + y = self.bn0(x_slice) + y_reshape = y.reshape(self.reshape) + out = self.bn1(y_reshape) + return out + + x = mx.nd.random.uniform(shape=(16, 128, 256, 256)) + slice = [[0, 64, 50, 0], [8, 128, 200, 256]] + shape = (1, 128, 256, -1) + net = Net(shape, slice) + check_layer_forward_withinput(net, x) + + +@with_seed() +def test_reshape_batchnorm_slice_batchnorm(): + class Net(gluon.HybridBlock): + def __init__(self, shape, slice, **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + self.conv0 = nn.Conv2D(128, (1, 1)) + self.bn0 = nn.BatchNorm(2) + self.bn1 = nn.BatchNorm(0) + self.reshape = shape + self.slice = slice + + def hybrid_forward(self, F, x): + x_in = self.conv0(x) + x_reshape = x_in.reshape(self.reshape) + y = self.bn0(x_reshape) + y_slice = y.slice(begin=tuple(self.slice[0]), end=tuple(self.slice[1])) + out = self.bn1(y_slice) + return out + + x = mx.nd.random.uniform(shape=(16, 128, 256, 256)) + slice = [[0, 0, 50, 0], [8, 1, -1, 100]] + shape = (128, 1, 256, -1) + net = Net(shape, slice) + check_layer_forward_withinput(net, x) + +@with_seed() +def test_reshape_pooling2d(): + max_pooling = nn.MaxPool2D(strides=(2, 3), padding=(1, 1)) + avg_pooling = nn.AvgPool2D(strides=(2, 2), padding=(1, 1)) + global_maxpooling = nn.GlobalMaxPool2D() + global_avgpooling = nn.GlobalAvgPool2D() + pooling_layers = [max_pooling, avg_pooling, global_maxpooling, global_avgpooling] + class Net(gluon.HybridBlock): + def __init__(self, + shape, + pooling_layer, + **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + self.reshape = shape + self.pool0 = pooling_layer + + def hybrid_forward(self, F, x): + x_reshape = x.reshape(self.reshape) + out = 
self.pool0(x_reshape) + return out + + x = mx.nd.random.uniform(shape=(16, 128, 256, 256)) + shape = (128, 256, 256, -1) + for i in range(len(pooling_layers)): + net = Net(shape, pooling_layers[i]) + check_layer_forward_withinput(net, x) + +@with_seed() +def test_slice_pooling2d(): + max_pooling = nn.MaxPool2D(strides=(2, 3), padding=(1, 1)) + avg_pooling = nn.AvgPool2D(strides=(2, 2), padding=(1, 1)) + global_maxpooling = nn.GlobalMaxPool2D() + global_avgpooling = nn.GlobalAvgPool2D() + pooling_layers = [max_pooling, avg_pooling, global_maxpooling, global_avgpooling] + class Net(gluon.HybridBlock): + def __init__(self, + slice, + pooling_layer, + **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + self.slice = slice + self.pool0 = pooling_layer + + def hybrid_forward(self, F, x): + x_slice = x.slice(begin=self.slice[0], end=self.slice[1]) + out = self.pool0(x_slice) + return out + + x = mx.nd.random.uniform(shape=(16, 128, 256, 256)) + slice = [(12, 0, 128, 64), (16, 16, 256, 256)] + for i in range(len(pooling_layers)): + net = Net(slice, pooling_layers[i]) + check_layer_forward_withinput(net, x) + +@with_seed() +def test_reshape_pooling2d_reshape_pooling2d(): + max_pooling = nn.MaxPool2D(strides=(2, 2), padding=(1, 1)) + avg_pooling = nn.AvgPool2D(strides=(2, 2), padding=(1, 1)) + global_maxpooling = nn.GlobalMaxPool2D() + global_avgpooling = nn.GlobalAvgPool2D() + pooling_layers = [max_pooling, avg_pooling, global_maxpooling, global_avgpooling] + class Net(gluon.HybridBlock): + def __init__(self, + shape, + pooling_layer1, + pooling_layer2, + **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + self.reshape = shape + self.pool0 = pooling_layer1 + self.pool1 = pooling_layer2 + + def hybrid_forward(self, F, x): + x_reshape = x.reshape(self.reshape[0]) + y = self.pool0(x_reshape) + y_reshape = y.reshape(self.reshape[1]) + out = self.pool1(y_reshape) + return out + + x = mx.nd.random.uniform(shape=(16, 128, 256, 256)) + shape = [(128, 256, 64, -1), (128, 256, 11, -1)] + for i in range(len(pooling_layers)): + for j in range(len(pooling_layers)): + if isinstance(pooling_layers[i], (nn.GlobalMaxPool2D, nn.GlobalAvgPool2D)): + shape[1] = (256, 128, 1, 1) + net = Net(shape, pooling_layers[i], pooling_layers[j]) + check_layer_forward_withinput(net, x) + +@with_seed() +def test_slice_pooling2d_slice_pooling2d(): + max_pooling = nn.MaxPool2D(strides=(2, 3), padding=(1, 1)) + avg_pooling = nn.AvgPool2D(strides=(2, 2), padding=(1, 1)) + global_maxpooling = nn.GlobalMaxPool2D() + global_avgpooling = nn.GlobalAvgPool2D() + pooling_layers = [max_pooling, avg_pooling, global_maxpooling, global_avgpooling] + class Net(gluon.HybridBlock): + def __init__(self, + slice, + pooling_layer1, + pooling_layer2, + **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + self.slice = slice + self.pool0 = pooling_layer1 + self.pool1 = pooling_layer2 + + def hybrid_forward(self, F, x): + x_slice = x.slice(begin=self.slice[0][0], end=self.slice[0][1]) + y = self.pool0(x_slice) + y_slice = y.slice(begin=self.slice[1][0], end=self.slice[1][1]) + out = self.pool1(y_slice) + return out + + x = mx.nd.random.uniform(shape=(16, 128, 256, 256)) + slice = [[(8, 0, 100, 50), (16, -1, -1, -1)], [(0, 64, 0, 50), (2, -1, -1, -1)]] + for i in range(len(pooling_layers)): + for j in range(len(pooling_layers)): + if isinstance(pooling_layers[i], (nn.GlobalMaxPool2D, nn.GlobalAvgPool2D)): + slice[1] = [(0, 64, 0, 0), (2, -1, 1, 1)] + net = Net(slice, pooling_layers[i], 
pooling_layers[j])
+            check_layer_forward_withinput(net, x)
+
+@with_seed()
+def test_slice_pooling2d_reshape_pooling2d():
+    max_pooling = nn.MaxPool2D(strides=(2, 3), padding=(1, 1))
+    avg_pooling = nn.AvgPool2D(strides=(2, 2), padding=(1, 1))
+    global_maxpooling = nn.GlobalMaxPool2D()
+    global_avgpooling = nn.GlobalAvgPool2D()
+    pooling_layers = [max_pooling, avg_pooling, global_maxpooling, global_avgpooling]
+    class Net(gluon.HybridBlock):
+        def __init__(self,
+                     shape,
+                     slice,
+                     pooling_layer1,
+                     pooling_layer2,
+                     **kwargs):
+            super(Net, self).__init__(**kwargs)
+            with self.name_scope():
+                self.reshape = shape
+                self.slice = slice
+                self.pool0 = pooling_layer1
+                self.pool1 = pooling_layer2
+
+        def hybrid_forward(self, F, x):
+            x_slice = x.slice(begin=self.slice[0], end=self.slice[1])
+            y = self.pool0(x_slice)
+            y_reshape = y.reshape(self.reshape)
+            out = self.pool1(y_reshape)
+            return out
+
+    x = mx.nd.random.uniform(shape=(16, 128, 256, 256))
+    slice = [(8, 0, 100, 50), (16, 128, 256, 256)]
+    shape = (32, -1, 0, 0)
+    for i in range(len(pooling_layers)):
+        for j in range(len(pooling_layers)):
+            net = Net(shape, slice, pooling_layers[i], pooling_layers[j])
+            check_layer_forward_withinput(net, x)
+
+@with_seed()
+def test_reshape_pooling2d_slice_pooling2d():
+    max_pooling = nn.MaxPool2D(strides=(2, 3), padding=(1, 1))
+    avg_pooling = nn.AvgPool2D(strides=(2, 2), padding=(1, 1))
+    global_maxpooling = nn.GlobalMaxPool2D()
+    global_avgpooling = nn.GlobalAvgPool2D()
+    pooling_layers = [max_pooling, avg_pooling, global_maxpooling, global_avgpooling]
+    class Net(gluon.HybridBlock):
+        def __init__(self,
+                     shape,
+                     slice,
+                     pooling_layer1,
+                     pooling_layer2,
+                     **kwargs):
+            super(Net, self).__init__(**kwargs)
+            with self.name_scope():
+                self.reshape = shape
+                self.slice = slice
+                self.pool0 = pooling_layer1
+                self.pool1 = pooling_layer2
+
+        def hybrid_forward(self, F, x):
+            x_reshape = x.reshape(self.reshape)
+            y = self.pool0(x_reshape)
+            y_slice = y.slice(begin=self.slice[0], end=self.slice[1])
+            out = self.pool1(y_slice)
+            return out
+
+    x = mx.nd.random.uniform(shape=(16, 128, 256, 256))
+    shape = (0, 512, 64, -1)
+    slice = [(8, 256, 10, 20), (-1, -1, -1, 70)]
+    for i in range(len(pooling_layers)):
+        for j in range(len(pooling_layers)):
+            if isinstance(pooling_layers[i], (nn.GlobalMaxPool2D, nn.GlobalAvgPool2D)):
+                slice = [(8, 256, 0, 0), (-1, -1, 1, 1)]
+            net = Net(shape, slice, pooling_layers[i], pooling_layers[j])
+            check_layer_forward_withinput(net, x)
+
+@with_seed()
+@unittest.skip('skipping temporarily, tracked by https://github.com/apache/incubator-mxnet/issues/11164')
+def test_reshape_deconv():
+    class Net(gluon.HybridBlock):
+        def __init__(self, shape, **kwargs):
+            super(Net, self).__init__(**kwargs)
+            with self.name_scope():
+                self.reshape = shape
+                self.conv0 = nn.Conv2DTranspose(64, (3, 3))
+
+        def hybrid_forward(self, F, x):
+            x_reshape = x.reshape(self.reshape)
+            out = self.conv0(x_reshape)
+            return out
+    x = mx.nd.random.uniform(shape=(64, 2, 256, 256))
+    shape = (8, 16, 64, -1)
+    net = Net(shape)
+    check_layer_forward_withinput(net, x)
+
+@with_seed()
+@unittest.skip('skipping temporarily, tracked by https://github.com/apache/incubator-mxnet/issues/11164')
+def test_slice_deconv():
+    class Net(gluon.HybridBlock):
+        def __init__(self, slice, **kwargs):
+            super(Net, self).__init__(**kwargs)
+            with self.name_scope():
+                self.slice = slice
+                self.conv0 = nn.Conv2DTranspose(64, (3, 3))
+
+        def hybrid_forward(self, F, x):
+            x_slice = x.slice(begin=self.slice[0], end=self.slice[1])
+            out = self.conv0(x_slice)
+            return out
+    x = mx.nd.random.uniform(shape=(128, 32, 500, 500))
+    slice = [(0, 16, 0, 0), (1, 32, 256, 256)]
+    net = Net(slice)
+    check_layer_forward_withinput(net, x)
+
+@with_seed()
+@unittest.skip('skipping temporarily, tracked by https://github.com/apache/incubator-mxnet/issues/11164')
+def test_reshape_deconv_reshape_deconv():
+    class Net(gluon.HybridBlock):
+        def __init__(self, shape, **kwargs):
+            super(Net, self).__init__(**kwargs)
+            with self.name_scope():
+                self.reshape = shape
+                self.conv0 = nn.Conv2DTranspose(64, (3, 3))
+                self.conv1 = nn.Conv2DTranspose(128, (2, 3), strides=(2, 2))
+
+        def hybrid_forward(self, F, x):
+            x_reshape = x.reshape(self.reshape[0])
+            y = self.conv0(x_reshape)
+            y_reshape = y.reshape(self.reshape[1])
+            out = self.conv1(y_reshape)
+            return out
+    x = mx.nd.random.uniform(shape=(16, 32, 256, 512))
+    shape = [(32, 0, 256, -1), (64, 32, 129, -1)]
+    net = Net(shape)
+    check_layer_forward_withinput(net, x)
+
+@with_seed()
+@unittest.skip('skipping temporarily, tracked by https://github.com/apache/incubator-mxnet/issues/11164')
+def test_slice_deconv_slice_deconv():
+    class Net(gluon.HybridBlock):
+        def __init__(self, slice, **kwargs):
+            super(Net, self).__init__(**kwargs)
+            with self.name_scope():
+                self.slice = slice
+                self.conv0 = nn.Conv2DTranspose(64, (3, 3))
+                self.conv1 = nn.Conv2DTranspose(128, (2, 3), strides=(2, 2))
+
+        def hybrid_forward(self, F, x):
+            x_slice = x.slice(begin=self.slice[0][0], end=self.slice[0][1])
+            y = self.conv0(x_slice)
+            y_slice = y.slice(begin=self.slice[1][0], end=self.slice[1][1])
+            out = self.conv1(y_slice)
+            return out
+    x = mx.nd.random.uniform(shape=(128, 32, 500, 500))
+    slice = [[(0, 16, 0, 0), (8, 32, 128, 128)], [(4, 0, 2, 0), (8, 32, 130, 128)]]
+    net = Net(slice)
+    check_layer_forward_withinput(net, x)
+
+@with_seed()
+@unittest.skip('skipping temporarily, tracked by https://github.com/apache/incubator-mxnet/issues/11164')
+def test_reshape_deconv_slice_deconv():
+    class Net(gluon.HybridBlock):
+        def __init__(self, shape, slice, **kwargs):
+            super(Net, self).__init__(**kwargs)
+            with self.name_scope():
+                self.reshape = shape
+                self.slice = slice
+                self.conv0 = nn.Conv2DTranspose(64, (3, 3))
+                self.conv1 = nn.Conv2DTranspose(128, (2, 3), strides=(2, 2))
+
+        def hybrid_forward(self, F, x):
+            x_reshape = x.reshape(self.reshape)
+            y = self.conv0(x_reshape)
+            y_slice = y.slice(begin=self.slice[0], end=self.slice[1])
+            out = self.conv1(y_slice)
+            return out
+    x = mx.nd.random.uniform(shape=(16, 4, 500, 500))
+    shape = (32, 16, 125, -1)
+    slice = [(4, 32, 0, 0), (20, 64, 64, 224)]
+    net = Net(shape, slice)
+    check_layer_forward_withinput(net, x)
+
+@with_seed()
+@unittest.skip('skipping temporarily, tracked by https://github.com/apache/incubator-mxnet/issues/11164')
+def test_slice_deconv_reshape_deconv():
+    class Net(gluon.HybridBlock):
+        def __init__(self, shape, slice, **kwargs):
+            super(Net, self).__init__(**kwargs)
+            with self.name_scope():
+                self.reshape = shape
+                self.slice = slice
+                self.conv0 = nn.Conv2DTranspose(64, (3, 3))
+                self.conv1 = nn.Conv2DTranspose(128, (2, 3), strides=(2, 2))
+
+        def hybrid_forward(self, F, x):
+            x_slice = x.slice(begin=self.slice[0], end=self.slice[1])
+            y = self.conv0(x_slice)
+            y_reshape = y.reshape(self.reshape)
+            out = self.conv1(y_reshape)
+            return out
+    x = mx.nd.random.uniform(shape=(16, 32, 256, 512))
+    shape = (24, 16, 452, -1)
+    slice = [(4, 0, 0, 0), (16, 32, 224, 224)]
+    net = Net(shape, slice)
+    check_layer_forward_withinput(net, x)
+
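+# The activation tests below apply the same reshape/slice pattern to
+# elementwise layers. As a sketch (illustrative values only, not an extra
+# test case), the hybridized output must match the imperative one for any
+# reshape that preserves the element count:
+#
+#   act = nn.Activation('relu')
+#   x = mx.nd.random.uniform(-1, 1, shape=(16, 32, 256, 512))
+#   y = act(x.reshape((64, 8, 128, -1)))  # same data viewed as (64, 8, 128, 1024)
+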
+@with_seed() +def test_reshape_activation(): + class Net(gluon.HybridBlock): + def __init__(self, act, shape, **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + self.reshape = shape + self.act = nn.Activation(act) + + def hybrid_forward(self, F, x): + x_reshape = x.reshape(self.reshape) + out = self.act(x_reshape) + return out + acts = ["relu", "sigmoid", "tanh", "softrelu"] + for act in acts: + x = mx.nd.random.uniform(-1, 1, shape=(16, 32, 256, 512)) + shape = (64, 8, 128, -1) + net = Net(act, shape) + check_layer_forward_withinput(net, x) + + +@with_seed() +def test_slice_activation(): + class Net(gluon.HybridBlock): + def __init__(self, act, slice, **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + self.slice = slice + self.act = nn.Activation(act) + + def hybrid_forward(self, F, x): + x_slice = x.slice(begin=self.slice[0], end=self.slice[1]) + out = self.act(x_slice) + return out + + acts = ["relu", "sigmoid", "tanh", "softrelu"] + for act in acts: + x = mx.nd.random.uniform(-1, 1, shape=(16, 32, 256, 512)) + slice = [(8, 16, 0, 0), (16, 32, 100, 100)] + net = Net(act, slice) + check_layer_forward_withinput(net, x) + + +@with_seed() +def test_reshape_activation_reshape_activation(): + class Net(gluon.HybridBlock): + def __init__(self, act0, act1, shape, **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + self.reshape = shape + self.act0 = nn.Activation(act0) + self.act1 = nn.Activation(act1) + + def hybrid_forward(self, F, x): + x_reshape = x.reshape(self.reshape[0]) + y = self.act0(x_reshape) + y_reshape = y.reshape(self.reshape[1]) + out = self.act1(y_reshape) + return out + acts = ["relu", "sigmoid", "tanh", "softrelu"] + for idx0, act0 in enumerate(acts): + for idx1, act1 in enumerate(acts): + if idx1 == idx0: + continue + x = mx.nd.random.uniform(-1, 1, shape=(16, 32, 256, 512)) + shape = [(64, 8, 128, -1), (16, 64, 128, -1)] + net = Net(act0, act1, shape) + check_layer_forward_withinput(net, x) + + +@with_seed() +def test_slice_activation_slice_activation(): + class Net(gluon.HybridBlock): + def __init__(self, act0, act1, slice, **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + self.slice = slice + self.act0 = nn.Activation(act0) + self.act1 = nn.Activation(act1) + + def hybrid_forward(self, F, x): + x_slice = x.slice(begin=self.slice[0][0], end=self.slice[0][1]) + y = self.act0(x_slice) + y_slice = y.slice(begin=self.slice[1][0], end=self.slice[1][1]) + out = self.act1(y_slice) + return out + acts = ["relu", "sigmoid", "tanh", "softrelu"] + for idx0, act0 in enumerate(acts): + for idx1, act1 in enumerate(acts): + if idx1 == idx0: + continue + x = mx.nd.random.uniform(-1, 1, shape=(16, 32, 256, 512)) + slice = [[(0, 0, 100, 100), (8, 16, 256, 512)], [(2, 4, 0, 0), (8, 10, 128, 128)]] + net = Net(act0, act1, slice) + check_layer_forward_withinput(net, x) + + +@with_seed() +def test_reshape_activation_slice_activation(): + class Net(gluon.HybridBlock): + def __init__(self, act0, act1, shape, slice, **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + self.reshape = shape + self.slice = slice + self.act0 = nn.Activation(act0) + self.act1 = nn.Activation(act1) + + def hybrid_forward(self, F, x): + x_reshape = x.reshape(self.reshape) + y = self.act0(x_reshape) + y_slice = y.slice(begin=self.slice[0], end=self.slice[1]) + out = self.act1(y_slice) + return out + acts = ["relu", "sigmoid", "tanh", "softrelu"] + for idx0, act0 in enumerate(acts): + for idx1, 
act1 in enumerate(acts): + if idx1 == idx0: + continue + x = mx.nd.random.uniform(-1, 1, shape=(16, 32, 256, 512)) + shape = (64, 16, 128, -1) + slice = [(0, 0, 0, 100), (8, 16, 64, 228)] + net = Net(act0, act1, shape, slice) + check_layer_forward_withinput(net, x) + + +@with_seed() +def test_slice_activation_reshape_activation(): + class Net(gluon.HybridBlock): + def __init__(self, act0, act1, shape, slice, **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + self.reshape = shape + self.slice = slice + self.act0 = nn.Activation(act0) + self.act1 = nn.Activation(act1) + + def hybrid_forward(self, F, x): + x_slice = x.slice(begin=self.slice[0], end=self.slice[1]) + y = self.act0(x_slice) + y_reshape = y.reshape(self.reshape) + out = self.act1(y_reshape) + return out + acts = ["relu", "sigmoid", "tanh", "softrelu"] + for idx0, act0 in enumerate(acts): + for idx1, act1 in enumerate(acts): + if idx1 == idx0: + continue + x = mx.nd.random.uniform(-1, 1, shape=(16, 32, 256, 512)) + slice = [(0, 0, 0, 100), (8, 16, 64, 228)] + shape = (64, 16, 64, -1) + net = Net(act0, act1, shape, slice) + check_layer_forward_withinput(net, x) + +if __name__ == '__main__': + import nose + nose.runmodule() diff --git a/tests/python/unittest/test_gluon.py b/tests/python/unittest/test_gluon.py index 310f1d4bd944..ed79011673ee 100644 --- a/tests/python/unittest/test_gluon.py +++ b/tests/python/unittest/test_gluon.py @@ -1428,1057 +1428,6 @@ def hybrid_forward(self, F, array, index): with mx.autograd.record(train_mode=True): row, _ = model(array, index) row.backward() - -def check_layer_forward_withinput(net, x): - x_hybrid = x.copy() - x.attach_grad() - x_hybrid.attach_grad() - net.collect_params().initialize() - with mx.autograd.record(): - out1 = net(x) - out1.backward() - net.hybridize() - with mx.autograd.record(): - out2 = net(x_hybrid) - out2.backward() - mx.test_utils.assert_almost_equal(x.grad.asnumpy(), x_hybrid.grad.asnumpy(), rtol=1e-5, atol=1e-6) - mx.test_utils.assert_almost_equal(out1.asnumpy(), out2.asnumpy(), rtol=1e-5, atol=1e-6) - -@with_seed() -def test_conv2d_16c(): - chn_list = [16, 256] - kernel_list = [1, 3] - #kernel_list.append(224) - batch_size = 8 - class Net(gluon.HybridBlock): - def __init__(self, - chn_num, - kernel, - **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - self.conv0 = gluon.nn.Conv2D(chn_num, (kernel, kernel)) - - def hybrid_forward(self, F, x): - out = self.conv0(x) - return out - - x = mx.nd.random.uniform(-1.0, 1.0, shape=(batch_size, 3, 224, 224)) - for i in range(len(chn_list)): - for j in range(len(kernel_list)): - net = Net(chn_list[i], kernel_list[j]) - check_layer_forward_withinput(net, x) - - -@with_seed() -def test_group_conv2d_16c(): - grp_list = [16] - input_size_list = np.random.randint(low=3, high=65, size=10).tolist() - kernel_list = [1, 3] - batch_size = 8 - class Net(gluon.HybridBlock): - def __init__(self, - chn_num, - kernel, - **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - self.conv0 = gluon.nn.Conv2D(chn_num, (1, 1)) - self.conv1 = gluon.nn.Conv2D(chn_num, (kernel, kernel), groups=chn_num) - - def hybrid_forward(self, F, x): - y = self.conv0(x) - out = self.conv1(y) - return out - - for i in range(len(input_size_list)): - x = mx.nd.random.uniform(-1.0, 1.0, shape=(batch_size, 3, input_size_list[i], input_size_list[i])) - for j in range(len(grp_list)): - for k in range(len(kernel_list)): - net = Net(grp_list[j], kernel_list[k]) - check_layer_forward_withinput(net, x) - - 
-@with_seed() -@unittest.skip('skippping temporarily, tracked by https://github.com/apache/incubator-mxnet/issues/11164') -def test_deconv2d_16c(): - in_chn_list = [1024, 512, 256, 128, 64, 32, 16] - out_chn_list = [512, 256, 128, 64, 32, 16, 3] - kernel_list = [1, 3, 5, 7] - in_shape = [4, 8, 16, 32, 64, 224] - batch_size = 8 - class Net(gluon.HybridBlock): - def __init__(self, chn_num, kernel, **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - self.deconv0 = gluon.nn.Conv2DTranspose(chn_num, (kernel, kernel)) - - def hybrid_forward(self, F, x): - out = self.deconv0(x) - return out - for i in range(len(in_shape)): - x = mx.nd.random.uniform(-1.0, 1.0, shape=(batch_size, in_chn_list[i], in_shape[i], in_shape[i])) - for j in range(len(kernel_list)): - net = Net(out_chn_list[i], kernel_list[j]) - check_layer_forward_withinput(net, x) - - -@with_seed() -@unittest.skip('skippping temporarily, tracked by https://github.com/apache/incubator-mxnet/issues/11164') -def test_batchnorm_16c(): - chn_list = [16, 1024] - shape = np.random.randint(low=1, high=300, size=10) - shape_list = [] - for i in range(len(shape)): - shape_list.append((shape[i], shape[i])) - batch_size = 8 - class Net(gluon.HybridBlock): - def __init__(self, - chn_num, - kernel, - axis, - **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - self.conv0 = gluon.nn.Conv2D(chn_num, (kernel, kernel)) - self.bn0 = gluon.nn.BatchNorm(axis=axis) - - def hybrid_forward(self, F, x): - conv = self.conv0(x) - out = self.bn0(conv) - return out - - for i in range(len(chn_list)): - for j in range(len(shape_list)): - shape = (batch_size, ) + (3,) + shape_list[j] - x = mx.nd.random.uniform(-1.0, 1.0, shape=shape) - net = Net(chn_list[i], 1, 1) - check_layer_forward_withinput(net, x) - - -@with_seed() -def test_concat(): - chn_list = [64, 16] - shapes = [7, 5, 3] - input_num = np.random.randint(low=2, high=11) - shape_list = [] - for i in range(len(shapes)): - shape_list.append((shapes[i], shapes[i])) - batch_size = 8 - class Net(gluon.HybridBlock): - def __init__(self, - check_dim, - input_num, - chn_num, - kernel, - **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - from mxnet.gluon.contrib.nn import HybridConcurrent - self.concat = HybridConcurrent(axis=check_dim) - for i in range(input_num): - self.concat.add(gluon.nn.Conv2D(chn_num, (kernel, kernel))) - - def hybrid_forward(self, F, x): - return self.concat(x) - - for s in range(len(shape_list)): - shape = (batch_size,) + (3,) + shape_list[i] - x = mx.nd.random.uniform(-1.0, 1.0, shape=shape) - for i in range(len(chn_list)): - for axis in range(4): - net = Net(axis, input_num, chn_list[i], 1) - check_layer_forward_withinput(net, x) - - -@with_seed() -def test_reshape_conv(): - class Net(gluon.HybridBlock): - def __init__(self, **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - self.conv0 = nn.Conv2D(64, (3, 3)) - - def hybrid_forward(self, F, x): - x_reshape = x.reshape((0, 0, 448, 112)) - out = self.conv0(x_reshape) - return out - x = mx.nd.random.uniform(shape=(32, 3, 224, 224)) - net = Net() - check_layer_forward_withinput(net, x) - - -@with_seed() -@unittest.skip('skippping temporarily, tracked by https://github.com/apache/incubator-mxnet/issues/11164') -def test_reshape_conv_reshape_conv(): - class Net(gluon.HybridBlock): - def __init__(self, **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - self.conv0 = nn.Conv2D(64, (3, 3)) - self.conv1 = nn.Conv2D(256, (3, 3)) 
- - def hybrid_forward(self, F, x): - x_reshape = x.reshape((0, 0, 448, 112)) - y = self.conv0(x_reshape) - y_reshape = y.reshape((0, 0, 223, 220)) - out = self.conv1(y_reshape) - return out - x = mx.nd.random.uniform(shape=(32, 3, 224, 224)) - net = Net() - check_layer_forward_withinput(net, x) - - -@with_seed() -def test_slice_conv(): - class Net(gluon.HybridBlock): - def __init__(self, **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - self.conv0 = nn.Conv2D(16, (3, 3)) - - def hybrid_forward(self, F, x): - x_slice = x.slice(begin=(0, 2, 0, 0), end=(4, 5, 32, 32)) - out = self.conv0(x_slice) - return out - x = mx.nd.random.uniform(shape=(8, 6, 32, 32)) - net = Net() - check_layer_forward_withinput(net, x) - - -@with_seed() -def test_slice_conv_slice_conv(): - class Net(gluon.HybridBlock): - def __init__(self, **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - self.conv0 = nn.Conv2D(16, (1, 1)) - self.conv1 = nn.Conv2D(16, (1, 1)) - - def hybrid_forward(self, F, x): - x_slice = x.slice(begin=(0, 0, 0, 0), end=(4, 3, 16, 16)) - y = self.conv0(x_slice) - y_slice = y.slice(begin=(0, 1, 0, 0), end=(4, 4, 16, 16)) - out = self.conv1(y_slice) - return out - x = mx.nd.random.uniform(shape=(8, 6, 32, 32)) - net = Net() - check_layer_forward_withinput(net, x) - - -@with_seed() -@unittest.skip('skippping temporarily, tracked by https://github.com/apache/incubator-mxnet/issues/11164') -def test_slice_conv_reshape_conv(): - class Net(gluon.HybridBlock): - def __init__(self, **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - self.conv0 = nn.Conv2D(64, (3, 3)) - self.conv1 = nn.Conv2D(256, (3, 3)) - - def hybrid_forward(self, F, x): - x_slice = x.slice(begin=(0, 0, 1, 1), end=(32, 3, 225, 225)) - y = self.conv0(x_slice) - y_reshape = y.reshape((0, 0, 444, 111)) - out = self.conv1(y_reshape) - return out - - x = mx.nd.random.uniform(shape=(32, 3, 299, 299)) - net = Net() - check_layer_forward_withinput(net, x) - -@with_seed() -def test_reshape_conv_slice_conv(): - """ - This test will test gluon Conv2d computation with ndarray reshape and slice - """ - class Net(gluon.HybridBlock): - def __init__(self, **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - self.conv0 = nn.Conv2D(16, (1, 1)) - self.conv1 = nn.Conv2D(16, (1, 1)) - - def hybrid_forward(self, F, x): - x_reshape = x.reshape((0, 0, 64, 16)) - y = self.conv0(x_reshape) - y_slice = y.slice(begin=(0, 0, 0, 0), end=(4, 3, 16, 16)) - out = self.conv1(y_slice) - return out - x = mx.nd.random.uniform(shape=(8, 3, 32, 32)) - net = Net() - check_layer_forward_withinput(net, x) - - -@with_seed() -def test_reshape_dense(): - class Net(gluon.HybridBlock): - def __init__(self, **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - channel0 = np.random.randint(1, 17) - self.dense0 = nn.Dense(channel0) - - def hybrid_forward(self, F, x): - x_reshape = x.reshape((8, 64, 128, -1)) - out = self.dense0(x_reshape) - return out - - x = mx.nd.random.uniform(shape=(16, 32, 64, 64)) - net = Net() - check_layer_forward_withinput(net, x) - - -@with_seed() -def test_slice_dense(): - class Net(gluon.HybridBlock): - def __init__(self, slice, **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - channel0 = np.random.randint(1, 17) - self.dense0 = nn.Dense(channel0) - self.slice = slice - - def hybrid_forward(self, F, x): - x_slice = x.slice(begin=tuple(self.slice[0]), - end=tuple(self.slice[1])) - out = self.dense0(x_slice) - 
return out - - x = mx.nd.random.uniform(shape=(16, 32, 64, 64)) - slice = [[0, 16, 50, 0], [8, 32, 64, 64]] - net = Net(slice) - check_layer_forward_withinput(net, x) - -@with_seed() -def test_slice_dense_slice_dense(): - class Net(gluon.HybridBlock): - def __init__(self, slice, **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - channel0 = 50 - channel1 = np.random.randint(1, 33) - self.dense0 = nn.Dense(channel0) - self.dense1 = nn.Dense(channel1) - self.slice = slice - - def hybrid_forward(self, F, x): - x_slice = x.slice(begin=tuple(self.slice[0]), end=tuple(self.slice[1])) - y = self.dense0(x_slice) - y_slice = y.slice(begin=(4, 0), end=(-1, 10)) - out = self.dense1(y_slice) - return out - - x = mx.nd.random.uniform(shape=(16, 32, 64, 64)) - slice = [[0, 16, 50, 0], [8, 32, 64, 64]] - net = Net(slice) - check_layer_forward_withinput(net, x) - -@with_seed() -def test_reshape_dense_reshape_dense(): - class Net(gluon.HybridBlock): - def __init__(self, **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - channel0 = np.random.randint(1, 17) - channel1 = np.random.randint(1, 65) - self.dense0 = nn.Dense(channel0) - self.dense1 = nn.Dense(channel1) - - def hybrid_forward(self, F, x): - x_reshape = x.reshape((8, 64, 128, -1)) - y = self.dense0(x_reshape) - y_reshape = y.reshape((1, -1)) - out = self.dense1(y_reshape) - return out - - x = mx.nd.random.uniform(shape=(16, 32, 64, 64)) - net = Net() - check_layer_forward_withinput(net, x) - - -@with_seed() -def test_slice_dense_reshape_dense(): - class Net(gluon.HybridBlock): - def __init__(self, slice, **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - channel0 = np.random.randint(1, 17) - channel1 = np.random.randint(1, 17) - self.dense0 = nn.Dense(channel0) - self.dense1 = nn.Dense(channel1) - self.slice = slice - - def hybrid_forward(self, F, x): - x_slice = x.slice(begin=tuple(self.slice[0]), end=tuple(self.slice[1])) - y = self.dense0(x_slice) - y_reshape = y.reshape((1, -1)) - out = self.dense1(y_reshape) - return out - - x = mx.nd.random.uniform(shape=(16, 32, 64, 64)) - slice = [[0, 16, 50, 0], [8, 32, 64, 64]] - net = Net(slice) - check_layer_forward_withinput(net, x) - - -@with_seed() - -def test_reshape_dense_slice_dense(): - class Net(gluon.HybridBlock): - def __init__(self, **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - channel0 = 64 - channel1 = np.random.randint(1, 17) - self.dense0 = nn.Dense(channel0) - self.dense1 = nn.Dense(channel1) - - def hybrid_forward(self, F, x): - x_reshape = x.reshape((8, 64, 128, -1)) - y = self.dense0(x_reshape) - y_slice = y.slice(begin=(0, 32), end=(8, 64)) - out = self.dense1(y_slice) - return out - - x = mx.nd.random.uniform(shape=(16, 32, 64, 64)) - net = Net() - check_layer_forward_withinput(net, x) - - -@with_seed() -@unittest.skip('skippping temporarily, tracked by https://github.com/apache/incubator-mxnet/issues/11164') -def test_reshape_batchnorm(): - class Net(gluon.HybridBlock): - def __init__(self, shape, **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - self.conv0 = nn.Conv2D(128, (1, 1)) - self.bn0 = nn.BatchNorm() - self.reshape = shape - - def hybrid_forward(self, F, x): - x_in = self.conv0(x) - x_reshape = x_in.reshape(self.reshape) - out = self.bn0(x_reshape) - return out - - x = mx.nd.random.uniform(shape=(16, 128, 256, 256)) - shape = (32, 512, 128, -1) - net = Net(shape) - check_layer_forward_withinput(net, x) - - -@with_seed() -def 
test_slice_batchnorm(): - class Net(gluon.HybridBlock): - def __init__(self, slice, **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - self.conv0 = nn.Conv2D(128, (1, 1)) - self.bn0 = nn.BatchNorm(3) - self.slice = slice - - def hybrid_forward(self, F, x): - x_in = self.conv0(x) - x_slice = x_in.slice(begin=tuple(self.slice[0]), - end=tuple(self.slice[1])) - out = self.bn0(x_slice) - return out - - x = mx.nd.random.uniform(shape=(16, 128, 256, 256)) - slice = [[0, 64, 50, 0], [8, 128, 256, 256]] - net = Net(slice) - check_layer_forward_withinput(net, x) - - -@with_seed() -def test_slice_batchnorm_slice_batchnorm(): - class Net(gluon.HybridBlock): - def __init__(self, slice, **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - self.conv0 = nn.Conv2D(128, (1, 1)) - self.bn0 = nn.BatchNorm(3) - self.bn1 = nn.BatchNorm(1) - self.slice = slice - - def hybrid_forward(self, F, x): - x_in = self.conv0(x) - x_slice = x_in.slice(begin=tuple(self.slice[0][0]), end=tuple(self.slice[0][1])) - y = self.bn0(x_slice) - y_slice = y.slice(begin=tuple(self.slice[1][0]), end=tuple(self.slice[1][1])) - out = self.bn1(y_slice) - return out - - x = mx.nd.random.uniform(shape=(16, 128, 256, 256)) - slice = [[[0, 64, 50, 0], [8, 128, 200, 256]], [[4, 50, 0, 128], [7, -1, -1, -1]]] - net = Net(slice) - check_layer_forward_withinput(net, x) - - -@with_seed() -def test_reshape_batchnorm_reshape_batchnorm(): - class Net(gluon.HybridBlock): - def __init__(self, shape, **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - self.conv0 = nn.Conv2D(128, (1, 1)) - self.bn0 = nn.BatchNorm(0) - self.bn1 = nn.BatchNorm(2) - self.reshape = shape - - def hybrid_forward(self, F, x): - x_in = self.conv0(x) - x_reshape = x_in.reshape(self.reshape[0]) - y = self.bn0(x_reshape) - y_reshape = y.reshape(self.reshape[1]) - out = self.bn1(y_reshape) - return out - - x = mx.nd.random.uniform(shape=(16, 128, 256, 512)) - shape = [(8, 256, 128, -1), (32, 128, 512, -1)] - net = Net(shape) - check_layer_forward_withinput(net, x) - - -@with_seed() -def test_slice_batchnorm_reshape_batchnorm(): - class Net(gluon.HybridBlock): - def __init__(self, shape, slice, **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - self.conv0 = nn.Conv2D(128, (1, 1)) - self.bn0 = nn.BatchNorm(0) - self.bn1 = nn.BatchNorm(2) - self.reshape = shape - self.slice = slice - - def hybrid_forward(self, F, x): - x_in = self.conv0(x) - x_slice = x_in.slice(begin=tuple(self.slice[0]), end=tuple(self.slice[1])) - y = self.bn0(x_slice) - y_reshape = y.reshape(self.reshape) - out = self.bn1(y_reshape) - return out - - x = mx.nd.random.uniform(shape=(16, 128, 256, 256)) - slice = [[0, 64, 50, 0], [8, 128, 200, 256]] - shape = (1, 128, 256, -1) - net = Net(shape, slice) - check_layer_forward_withinput(net, x) - - -@with_seed() -def test_reshape_batchnorm_slice_batchnorm(): - class Net(gluon.HybridBlock): - def __init__(self, shape, slice, **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - self.conv0 = nn.Conv2D(128, (1, 1)) - self.bn0 = nn.BatchNorm(2) - self.bn1 = nn.BatchNorm(0) - self.reshape = shape - self.slice = slice - - def hybrid_forward(self, F, x): - x_in = self.conv0(x) - x_reshape = x_in.reshape(self.reshape) - y = self.bn0(x_reshape) - y_slice = y.slice(begin=tuple(self.slice[0]), end=tuple(self.slice[1])) - out = self.bn1(y_slice) - return out - - x = mx.nd.random.uniform(shape=(16, 128, 256, 256)) - slice = [[0, 0, 50, 0], [8, 1, -1, 100]] - 
shape = (128, 1, 256, -1) - net = Net(shape, slice) - check_layer_forward_withinput(net, x) - -@with_seed() -def test_reshape_pooling2d(): - max_pooling = nn.MaxPool2D(strides=(2, 3), padding=(1, 1)) - avg_pooling = nn.AvgPool2D(strides=(2, 2), padding=(1, 1)) - global_maxpooling = nn.GlobalMaxPool2D() - global_avgpooling = nn.GlobalAvgPool2D() - pooling_layers = [max_pooling, avg_pooling, global_maxpooling, global_avgpooling] - class Net(gluon.HybridBlock): - def __init__(self, - shape, - pooling_layer, - **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - self.reshape = shape - self.pool0 = pooling_layer - - def hybrid_forward(self, F, x): - x_reshape = x.reshape(self.reshape) - out = self.pool0(x_reshape) - return out - - x = mx.nd.random.uniform(shape=(16, 128, 256, 256)) - shape = (128, 256, 256, -1) - for i in range(len(pooling_layers)): - net = Net(shape, pooling_layers[i]) - check_layer_forward_withinput(net, x) - -@with_seed() -def test_slice_pooling2d(): - max_pooling = nn.MaxPool2D(strides=(2, 3), padding=(1, 1)) - avg_pooling = nn.AvgPool2D(strides=(2, 2), padding=(1, 1)) - global_maxpooling = nn.GlobalMaxPool2D() - global_avgpooling = nn.GlobalAvgPool2D() - pooling_layers = [max_pooling, avg_pooling, global_maxpooling, global_avgpooling] - class Net(gluon.HybridBlock): - def __init__(self, - slice, - pooling_layer, - **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - self.slice = slice - self.pool0 = pooling_layer - - def hybrid_forward(self, F, x): - x_slice = x.slice(begin=self.slice[0], end=self.slice[1]) - out = self.pool0(x_slice) - return out - - x = mx.nd.random.uniform(shape=(16, 128, 256, 256)) - slice = [(12, 0, 128, 64), (16, 16, 256, 256)] - for i in range(len(pooling_layers)): - net = Net(slice, pooling_layers[i]) - check_layer_forward_withinput(net, x) - -@with_seed() -def test_reshape_pooling2d_reshape_pooling2d(): - max_pooling = nn.MaxPool2D(strides=(2, 2), padding=(1, 1)) - avg_pooling = nn.AvgPool2D(strides=(2, 2), padding=(1, 1)) - global_maxpooling = nn.GlobalMaxPool2D() - global_avgpooling = nn.GlobalAvgPool2D() - pooling_layers = [max_pooling, avg_pooling, global_maxpooling, global_avgpooling] - class Net(gluon.HybridBlock): - def __init__(self, - shape, - pooling_layer1, - pooling_layer2, - **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - self.reshape = shape - self.pool0 = pooling_layer1 - self.pool1 = pooling_layer2 - - def hybrid_forward(self, F, x): - x_reshape = x.reshape(self.reshape[0]) - y = self.pool0(x_reshape) - y_reshape = y.reshape(self.reshape[1]) - out = self.pool1(y_reshape) - return out - - x = mx.nd.random.uniform(shape=(16, 128, 256, 256)) - shape = [(128, 256, 64, -1), (128, 256, 11, -1)] - for i in range(len(pooling_layers)): - for j in range(len(pooling_layers)): - if isinstance(pooling_layers[i], (nn.GlobalMaxPool2D, nn.GlobalAvgPool2D)): - shape[1] = (256, 128, 1, 1) - net = Net(shape, pooling_layers[i], pooling_layers[j]) - check_layer_forward_withinput(net, x) - -@with_seed() -def test_slice_pooling2d_slice_pooling2d(): - max_pooling = nn.MaxPool2D(strides=(2, 3), padding=(1, 1)) - avg_pooling = nn.AvgPool2D(strides=(2, 2), padding=(1, 1)) - global_maxpooling = nn.GlobalMaxPool2D() - global_avgpooling = nn.GlobalAvgPool2D() - pooling_layers = [max_pooling, avg_pooling, global_maxpooling, global_avgpooling] - class Net(gluon.HybridBlock): - def __init__(self, - slice, - pooling_layer1, - pooling_layer2, - **kwargs): - super(Net, 
self).__init__(**kwargs) - with self.name_scope(): - self.slice = slice - self.pool0 = pooling_layer1 - self.pool1 = pooling_layer2 - - def hybrid_forward(self, F, x): - x_slice = x.slice(begin=self.slice[0][0], end=self.slice[0][1]) - y = self.pool0(x_slice) - y_slice = y.slice(begin=self.slice[1][0], end=self.slice[1][1]) - out = self.pool1(y_slice) - return out - - x = mx.nd.random.uniform(shape=(16, 128, 256, 256)) - slice = [[(8, 0, 100, 50), (16, -1, -1, -1)], [(0, 64, 0, 50), (2, -1, -1, -1)]] - for i in range(len(pooling_layers)): - for j in range(len(pooling_layers)): - if isinstance(pooling_layers[i], (nn.GlobalMaxPool2D, nn.GlobalAvgPool2D)): - slice[1] = [(0, 64, 0, 0), (2, -1, 1, 1)] - net = Net(slice, pooling_layers[i], pooling_layers[j]) - check_layer_forward_withinput(net, x) - -@with_seed() -def test_slice_pooling2d_reshape_pooling2d(): - max_pooling = nn.MaxPool2D(strides=(2, 3), padding=(1, 1)) - avg_pooling = nn.AvgPool2D(strides=(2, 2), padding=(1, 1)) - global_maxpooling = nn.GlobalMaxPool2D() - global_avgpooling = nn.GlobalAvgPool2D() - pooling_layers = [max_pooling, avg_pooling, global_maxpooling, global_avgpooling] - class Net(gluon.HybridBlock): - def __init__(self, - shape, - slice, - pooling_layer1, - pooling_layer2, - **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - self.reshape = shape - self.slice = slice - self.pool0 = pooling_layer1 - self.pool1 = pooling_layer2 - - def hybrid_forward(self, F, x): - x_slice = x.slice(begin=self.slice[0], end=self.slice[1]) - y = self.pool0(x_slice) - y_reshape = y.reshape(self.reshape) - out = self.pool1(y_reshape) - return out - - x = mx.nd.random.uniform(shape=(16, 128, 256, 256)) - slice = [(8, 0, 100, 50), (16, 128, 256, 256)] - shape = (32, -1, 0, 0) - for i in range(len(pooling_layers)): - for j in range(len(pooling_layers)): - net = Net(shape, slice, pooling_layers[i], pooling_layers[j]) - check_layer_forward_withinput(net, x) - -@with_seed() -def test_reshape_pooling2d_slice_pooling2d(): - max_pooling = nn.MaxPool2D(strides=(2, 3), padding=(1, 1)) - avg_pooling = nn.AvgPool2D(strides=(2, 2), padding=(1, 1)) - global_maxpooling = nn.GlobalMaxPool2D() - global_avgpooling = nn.GlobalAvgPool2D() - pooling_layers = [max_pooling, avg_pooling, global_maxpooling, global_avgpooling] - class Net(gluon.HybridBlock): - def __init__(self, - shape, - slice, - pooling_layer1, - pooling_layer2, - **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - self.reshape = shape - self.slice = slice - self.pool0 = pooling_layer1 - self.pool1 = pooling_layer2 - - def hybrid_forward(self, F, x): - x_reshape = x.reshape(self.reshape) - y = self.pool0(x_reshape) - y_slice = y.slice(begin=self.slice[0], end=self.slice[1]) - out = self.pool1(y_slice) - return out - - x = mx.nd.random.uniform(shape=(16, 128, 256, 256)) - shape = (0, 512, 64, -1) - slice = [(8, 256, 10, 20), (-1, -1, -1, 70)] - for i in range(len(pooling_layers)): - for j in range(len(pooling_layers)): - if isinstance(pooling_layers[i], (nn.GlobalMaxPool2D, nn.GlobalAvgPool2D)): - slice = [(8, 256, 0, 0), (-1, -1, 1, 1)] - net = Net(shape, slice, pooling_layers[i], pooling_layers[j]) - check_layer_forward_withinput(net, x) - -@with_seed() -@unittest.skip('skippping temporarily, tracked by https://github.com/apache/incubator-mxnet/issues/11164') -def test_reshape_deconv(): - class Net(gluon.HybridBlock): - def __init__(self, shape, **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - self.reshape = shape - 
self.conv0 = nn.Conv2DTranspose(64, (3, 3)) - - def hybrid_forward(self, F, x): - x_reshape = x.reshape(self.reshape) - out = self.conv0(x_reshape) - return out - x = mx.nd.random.uniform(shape=(64, 2, 256, 256)) - shape = (8, 16, 64, -1) - net = Net(shape) - check_layer_forward_withinput(net, x) - -@with_seed() -@unittest.skip('skippping temporarily, tracked by https://github.com/apache/incubator-mxnet/issues/11164') -def test_slice_deconv(): - class Net(gluon.HybridBlock): - def __init__(self, slice, **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - self.slice = slice - self.conv0 = nn.Conv2DTranspose(64, (3, 3)) - - def hybrid_forward(self, F, x): - x_slice = x.slice(begin=self.slice[0], end=self.slice[1]) - out = self.conv0(x_slice) - return out - x = mx.nd.random.uniform(shape=(128, 32, 500, 500)) - slice = [(0, 16, 0, 0), (1, 32, 256, 256)] - net = Net(slice) - check_layer_forward_withinput(net, x) - -@with_seed() -@unittest.skip('skippping temporarily, tracked by https://github.com/apache/incubator-mxnet/issues/11164') -def test_reshape_deconv_reshape_deconv(): - class Net(gluon.HybridBlock): - def __init__(self, shape, **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - self.reshape = shape - self.conv0 = nn.Conv2DTranspose(64, (3, 3)) - self.conv1 = nn.Conv2DTranspose(128, (2, 3), strides=(2, 2)) - - def hybrid_forward(self, F, x): - x_reshape = x.reshape(self.reshape[0]) - y = self.conv0(x_reshape) - y_reshape = y.reshape(self.reshape[1]) - out = self.conv1(y_reshape) - return out - x = mx.nd.random.uniform(shape=(16, 32, 256, 512)) - shape = [(32, 0, 256, -1), (64, 32, 129, -1)] - net = Net(shape) - check_layer_forward_withinput(net, x) - -@with_seed() -@unittest.skip('skippping temporarily, tracked by https://github.com/apache/incubator-mxnet/issues/11164') -def test_slice_deconv_slice_deconv(): - class Net(gluon.HybridBlock): - def __init__(self, slice, **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - self.slice = slice - self.conv0 = nn.Conv2DTranspose(64, (3, 3)) - self.conv1 = nn.Conv2DTranspose(128, (2, 3), strides=(2, 2)) - - def hybrid_forward(self, F, x): - x_slice = x.slice(begin=self.slice[0][0], end=self.slice[0][1]) - y = self.conv0(x_slice) - y_slice = y.slice(begin=self.slice[1][0], end=self.slice[1][1]) - out = self.conv1(y_slice) - return out - x = mx.nd.random.uniform(shape=(128, 32, 500, 500)) - slice = [[(0, 16, 0, 0), (8, 32, 128, 128)], [(4, 0, 2, 0), (8, 32, 130, 128)]] - net = Net(slice) - check_layer_forward_withinput(net, x) - -@with_seed() -@unittest.skip('skippping temporarily, tracked by https://github.com/apache/incubator-mxnet/issues/11164') -def test_reshape_deconv_slice_deconv(): - class Net(gluon.HybridBlock): - def __init__(self, shape, slice, **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - self.reshape = shape - self.slice = slice - self.conv0 = nn.Conv2DTranspose(64, (3, 3)) - self.conv1 = nn.Conv2DTranspose(128, (2, 3), strides=(2, 2)) - - def hybrid_forward(self, F, x): - x_reshape = x.reshape(self.reshape) - y = self.conv0(x_reshape) - y_slice = y.slice(begin=self.slice[0], end=self.slice[1]) - out = self.conv1(y_slice) - return out - x = mx.nd.random.uniform(shape=(16, 4, 500, 500)) - shape = (32, 16, 125, -1) - slice = [(4, 32, 0, 0), (20, 64, 64, 224)] - net = Net(shape, slice) - check_layer_forward_withinput(net, x) - -@with_seed() -@unittest.skip('skippping temporarily, tracked by 
https://github.com/apache/incubator-mxnet/issues/11164') -def test_slice_deconv_reshape_deconv(): - class Net(gluon.HybridBlock): - def __init__(self, shape, slice, **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - self.reshape = shape - self.slice = slice - self.conv0 = nn.Conv2DTranspose(64, (3, 3)) - self.conv1 = nn.Conv2DTranspose(128, (2, 3), strides=(2, 2)) - - def hybrid_forward(self, F, x): - x_slice = x.slice(begin=self.slice[0], end=self.slice[1]) - y = self.conv0(x_slice) - y_reshape = y.reshape(self.reshape) - out = self.conv1(y_reshape) - return out - x = mx.nd.random.uniform(shape=(16, 32, 256, 512)) - shape = (24, 16, 452, -1) - slice = [(4, 0, 0, 0), (16, 32, 224, 224)] - net = Net(shape, slice) - check_layer_forward_withinput(net, x) - -@with_seed() -def test_reshape_activation(): - class Net(gluon.HybridBlock): - def __init__(self, act, shape, **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - self.reshape = shape - self.act = nn.Activation(act) - - def hybrid_forward(self, F, x): - x_reshape = x.reshape(self.reshape) - out = self.act(x_reshape) - return out - acts = ["relu", "sigmoid", "tanh", "softrelu"] - for act in acts: - x = mx.nd.random.uniform(-1, 1, shape=(16, 32, 256, 512)) - shape = (64, 8, 128, -1) - net = Net(act, shape) - check_layer_forward_withinput(net, x) - - -@with_seed() -def test_slice_activation(): - class Net(gluon.HybridBlock): - def __init__(self, act, slice, **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - self.slice = slice - self.act = nn.Activation(act) - - def hybrid_forward(self, F, x): - x_slice = x.slice(begin=self.slice[0], end=self.slice[1]) - out = self.act(x_slice) - return out - - acts = ["relu", "sigmoid", "tanh", "softrelu"] - for act in acts: - x = mx.nd.random.uniform(-1, 1, shape=(16, 32, 256, 512)) - slice = [(8, 16, 0, 0), (16, 32, 100, 100)] - net = Net(act, slice) - check_layer_forward_withinput(net, x) - - -@with_seed() -def test_reshape_activation_reshape_activation(): - class Net(gluon.HybridBlock): - def __init__(self, act0, act1, shape, **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - self.reshape = shape - self.act0 = nn.Activation(act0) - self.act1 = nn.Activation(act1) - - def hybrid_forward(self, F, x): - x_reshape = x.reshape(self.reshape[0]) - y = self.act0(x_reshape) - y_reshape = y.reshape(self.reshape[1]) - out = self.act1(y_reshape) - return out - acts = ["relu", "sigmoid", "tanh", "softrelu"] - for idx0, act0 in enumerate(acts): - for idx1, act1 in enumerate(acts): - if idx1 == idx0: - continue - x = mx.nd.random.uniform(-1, 1, shape=(16, 32, 256, 512)) - shape = [(64, 8, 128, -1), (16, 64, 128, -1)] - net = Net(act0, act1, shape) - check_layer_forward_withinput(net, x) - - -@with_seed() -def test_slice_activation_slice_activation(): - class Net(gluon.HybridBlock): - def __init__(self, act0, act1, slice, **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - self.slice = slice - self.act0 = nn.Activation(act0) - self.act1 = nn.Activation(act1) - - def hybrid_forward(self, F, x): - x_slice = x.slice(begin=self.slice[0][0], end=self.slice[0][1]) - y = self.act0(x_slice) - y_slice = y.slice(begin=self.slice[1][0], end=self.slice[1][1]) - out = self.act1(y_slice) - return out - acts = ["relu", "sigmoid", "tanh", "softrelu"] - for idx0, act0 in enumerate(acts): - for idx1, act1 in enumerate(acts): - if idx1 == idx0: - continue - x = mx.nd.random.uniform(-1, 1, shape=(16, 32, 256, 512)) 
-            slice = [[(0, 0, 100, 100), (8, 16, 256, 512)], [(2, 4, 0, 0), (8, 10, 128, 128)]]
-            net = Net(act0, act1, slice)
-            check_layer_forward_withinput(net, x)
-
-
-@with_seed()
-def test_reshape_activation_slice_activation():
-    class Net(gluon.HybridBlock):
-        def __init__(self, act0, act1, shape, slice, **kwargs):
-            super(Net, self).__init__(**kwargs)
-            with self.name_scope():
-                self.reshape = shape
-                self.slice = slice
-                self.act0 = nn.Activation(act0)
-                self.act1 = nn.Activation(act1)
-
-        def hybrid_forward(self, F, x):
-            x_reshape = x.reshape(self.reshape)
-            y = self.act0(x_reshape)
-            y_slice = y.slice(begin=self.slice[0], end=self.slice[1])
-            out = self.act1(y_slice)
-            return out
-    acts = ["relu", "sigmoid", "tanh", "softrelu"]
-    for idx0, act0 in enumerate(acts):
-        for idx1, act1 in enumerate(acts):
-            if idx1 == idx0:
-                continue
-            x = mx.nd.random.uniform(-1, 1, shape=(16, 32, 256, 512))
-            shape = (64, 16, 128, -1)
-            slice = [(0, 0, 0, 100), (8, 16, 64, 228)]
-            net = Net(act0, act1, shape, slice)
-            check_layer_forward_withinput(net, x)
-
-
-@with_seed()
-def test_slice_activation_reshape_activation():
-    class Net(gluon.HybridBlock):
-        def __init__(self, act0, act1, shape, slice, **kwargs):
-            super(Net, self).__init__(**kwargs)
-            with self.name_scope():
-                self.reshape = shape
-                self.slice = slice
-                self.act0 = nn.Activation(act0)
-                self.act1 = nn.Activation(act1)
-
-        def hybrid_forward(self, F, x):
-            x_slice = x.slice(begin=self.slice[0], end=self.slice[1])
-            y = self.act0(x_slice)
-            y_reshape = y.reshape(self.reshape)
-            out = self.act1(y_reshape)
-            return out
-    acts = ["relu", "sigmoid", "tanh", "softrelu"]
-    for idx0, act0 in enumerate(acts):
-        for idx1, act1 in enumerate(acts):
-            if idx1 == idx0:
-                continue
-            x = mx.nd.random.uniform(-1, 1, shape=(16, 32, 256, 512))
-            slice = [(0, 0, 0, 100), (8, 16, 64, 228)]
-            shape = (64, 16, 64, -1)
-            net = Net(act0, act1, shape, slice)
-            check_layer_forward_withinput(net, x)
-
 if __name__ == '__main__':
     import nose

From 92f76c7fa5144a14bd23f0c39f1dd632238b7f85 Mon Sep 17 00:00:00 2001
From: Shufan
Date: Thu, 12 Jul 2018 18:21:50 +0800
Subject: [PATCH 10/12] fix flaky case

Initialize the conv weights with a constant (0.001) instead of random
values so the expected output is deterministic: with an all-ones input,
each output element of the 3x3 convolution over 3 input channels sums
3 * 3 * 3 = 27 weights, giving 27 * 0.001 = 0.027 regardless of the seed.

---
 tests/python/mkl/test_mkldnn.py | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/tests/python/mkl/test_mkldnn.py b/tests/python/mkl/test_mkldnn.py
index 7f4a4d062d7a..d218a5a26ffe 100644
--- a/tests/python/mkl/test_mkldnn.py
+++ b/tests/python/mkl/test_mkldnn.py
@@ -65,18 +65,18 @@ def test_mkldnn_ndarray_slice():
     net = gluon.nn.HybridSequential()
     with net.name_scope():
         net.add(gluon.nn.Conv2D(channels=32, kernel_size=3, activation=None))
-    net.collect_params().initialize(ctx=ctx)
+    net.collect_params().initialize(init=mx.init.Constant(0.001), ctx=ctx)
     x = mx.nd.array(np.ones([32, 3, 224, 224]), ctx)
     y = net(x)
     # trigger computation on ndarray slice
-    assert_almost_equal(y[0].asnumpy()[0, 0, 0], 0.3376348)
+    assert_almost_equal(y[0].asnumpy()[0, 0, 0], 0.027)


 def test_mkldnn_engine_threading():
     net = gluon.nn.HybridSequential()
     with net.name_scope():
         net.add(gluon.nn.Conv2D(channels=32, kernel_size=3, activation=None))
-    net.collect_params().initialize(ctx=mx.cpu())
+    net.collect_params().initialize(init=mx.init.Constant(0.001), ctx=mx.cpu())
     class Dummy(gluon.data.Dataset):
         def __len__(self):
             return 2
@@ -93,8 +93,8 @@ def __getitem__(self, key):
     # below line triggers different execution thread
     for _ in loader:
         y = net(mx.nd.array(np.ones(X))).asnumpy()
-        # output should be 016711406 (non-mkldnn mode output)
-        assert_almost_equal(y[0, 0, 0, 0], 0.016711406)
+        # output should be 0.027 (non-mkldnn mode output)
+        assert_almost_equal(y[0, 0, 0, 0], 0.027)
         break

From dc2fc15f8c0b253d2f3b7a226fb8d821161e2840 Mon Sep 17 00:00:00 2001
From: Shufan
Date: Wed, 12 Sep 2018 21:43:09 +0800
Subject: [PATCH 11/12] Remove the test_mkldnn_gluon.py

---
 tests/python/mkl/test_mkldnn_gluon.py | 1085 -------------------------
 1 file changed, 1085 deletions(-)
 delete mode 100644 tests/python/mkl/test_mkldnn_gluon.py

diff --git a/tests/python/mkl/test_mkldnn_gluon.py b/tests/python/mkl/test_mkldnn_gluon.py
deleted file mode 100644
index e606106ae5ce..000000000000
--- a/tests/python/mkl/test_mkldnn_gluon.py
+++ /dev/null
@@ -1,1085 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-"""
-MKL-DNN related test cases
-"""
-import sys
-import os
-import numpy as np
-import mxnet as mx
-import unittest
-from mxnet.test_utils import assert_almost_equal
-from mxnet import gluon
-from mxnet.gluon import nn
-from mxnet.test_utils import *
-curr_path = os.path.dirname(os.path.abspath(os.path.expanduser(__file__)))
-sys.path.append(os.path.join(curr_path, '../unittest/'))
-from common import with_seed
-
-def check_layer_forward_withinput(net, x):
-    x_hybrid = x.copy()
-    x.attach_grad()
-    x_hybrid.attach_grad()
-    net.collect_params().initialize()
-    with mx.autograd.record():
-        out1 = net(x)
-    out1.backward()
-    net.hybridize()
-    with mx.autograd.record():
-        out2 = net(x_hybrid)
-    out2.backward()
-    mx.test_utils.assert_almost_equal(x.grad.asnumpy(), x_hybrid.grad.asnumpy(), rtol=1e-5, atol=1e-6)
-    mx.test_utils.assert_almost_equal(out1.asnumpy(), out2.asnumpy(), rtol=1e-5, atol=1e-6)
-
-@with_seed()
-def test_conv2d_16c():
-    chn_list = [16, 256]
-    kernel_list = [1, 3]
-    #kernel_list.append(224)
-    batch_size = 8
-    class Net(gluon.HybridBlock):
-        def __init__(self,
-                     chn_num,
-                     kernel,
-                     **kwargs):
-            super(Net, self).__init__(**kwargs)
-            with self.name_scope():
-                self.conv0 = gluon.nn.Conv2D(chn_num, (kernel, kernel))
-
-        def hybrid_forward(self, F, x):
-            out = self.conv0(x)
-            return out
-
-    x = mx.nd.random.uniform(-1.0, 1.0, shape=(batch_size, 3, 224, 224))
-    for i in range(len(chn_list)):
-        for j in range(len(kernel_list)):
-            net = Net(chn_list[i], kernel_list[j])
-            check_layer_forward_withinput(net, x)
-
-@with_seed()
-def test_group_conv2d_16c():
-    grp_list = [16]
-    input_size_list = np.random.randint(low=3, high=65, size=10).tolist()
-    kernel_list = [1, 3]
-    batch_size = 8
-    class Net(gluon.HybridBlock):
-        def __init__(self,
-                     chn_num,
-                     kernel,
-                     **kwargs):
-            super(Net, self).__init__(**kwargs)
-            with self.name_scope():
-                self.conv0 = gluon.nn.Conv2D(chn_num, (1, 1))
-                self.conv1 = gluon.nn.Conv2D(chn_num, (kernel, kernel), groups=chn_num)
-
-        def hybrid_forward(self, F, x):
-            y = self.conv0(x)
-            out = self.conv1(y)
-
return out - - for i in range(len(input_size_list)): - x = mx.nd.random.uniform(-1.0, 1.0, shape=(batch_size, 3, input_size_list[i], input_size_list[i])) - for j in range(len(grp_list)): - for k in range(len(kernel_list)): - net = Net(grp_list[j], kernel_list[k]) - check_layer_forward_withinput(net, x) - - -@with_seed() -@unittest.skip('skippping temporarily, tracked by https://github.com/apache/incubator-mxnet/issues/11164') -def test_deconv2d_16c(): - in_chn_list = [1024, 512, 256, 128, 64, 32, 16] - out_chn_list = [512, 256, 128, 64, 32, 16, 3] - kernel_list = [1, 3, 5, 7] - in_shape = [4, 8, 16, 32, 64, 224] - batch_size = 8 - class Net(gluon.HybridBlock): - def __init__(self, chn_num, kernel, **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - self.deconv0 = gluon.nn.Conv2DTranspose(chn_num, (kernel, kernel)) - - def hybrid_forward(self, F, x): - out = self.deconv0(x) - return out - for i in range(len(in_shape)): - x = mx.nd.random.uniform(-1.0, 1.0, shape=(batch_size, in_chn_list[i], in_shape[i], in_shape[i])) - for j in range(len(kernel_list)): - net = Net(out_chn_list[i], kernel_list[j]) - check_layer_forward_withinput(net, x) - - -@with_seed() -@unittest.skip('skippping temporarily, tracked by https://github.com/apache/incubator-mxnet/issues/11164') -def test_batchnorm_16c(): - chn_list = [16, 1024] - shape = np.random.randint(low=1, high=300, size=10) - shape_list = [] - for i in range(len(shape)): - shape_list.append((shape[i], shape[i])) - batch_size = 8 - class Net(gluon.HybridBlock): - def __init__(self, - chn_num, - kernel, - axis, - **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - self.conv0 = gluon.nn.Conv2D(chn_num, (kernel, kernel)) - self.bn0 = gluon.nn.BatchNorm(axis=axis) - - def hybrid_forward(self, F, x): - conv = self.conv0(x) - out = self.bn0(conv) - return out - - for i in range(len(chn_list)): - for j in range(len(shape_list)): - shape = (batch_size, ) + (3,) + shape_list[j] - x = mx.nd.random.uniform(-1.0, 1.0, shape=shape) - net = Net(chn_list[i], 1, 1) - check_layer_forward_withinput(net, x) - - -@with_seed() -def test_concat(): - chn_list = [64, 16] - shapes = [7, 5, 3] - input_num = np.random.randint(low=2, high=11) - shape_list = [] - for i in range(len(shapes)): - shape_list.append((shapes[i], shapes[i])) - batch_size = 8 - class Net(gluon.HybridBlock): - def __init__(self, - check_dim, - input_num, - chn_num, - kernel, - **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - from mxnet.gluon.contrib.nn import HybridConcurrent - self.concat = HybridConcurrent(axis=check_dim) - for i in range(input_num): - self.concat.add(gluon.nn.Conv2D(chn_num, (kernel, kernel))) - - def hybrid_forward(self, F, x): - return self.concat(x) - - for s in range(len(shape_list)): - shape = (batch_size,) + (3,) + shape_list[i] - x = mx.nd.random.uniform(-1.0, 1.0, shape=shape) - for i in range(len(chn_list)): - for axis in range(4): - net = Net(axis, input_num, chn_list[i], 1) - check_layer_forward_withinput(net, x) - - -@with_seed() -def test_reshape_conv(): - class Net(gluon.HybridBlock): - def __init__(self, **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - self.conv0 = nn.Conv2D(64, (3, 3)) - - def hybrid_forward(self, F, x): - x_reshape = x.reshape((0, 0, 448, 112)) - out = self.conv0(x_reshape) - return out - x = mx.nd.random.uniform(shape=(32, 3, 224, 224)) - net = Net() - check_layer_forward_withinput(net, x) - - -@with_seed() -@unittest.skip('skippping temporarily, 
tracked by https://github.com/apache/incubator-mxnet/issues/11164') -def test_reshape_conv_reshape_conv(): - class Net(gluon.HybridBlock): - def __init__(self, **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - self.conv0 = nn.Conv2D(64, (3, 3)) - self.conv1 = nn.Conv2D(256, (3, 3)) - - def hybrid_forward(self, F, x): - x_reshape = x.reshape((0, 0, 448, 112)) - y = self.conv0(x_reshape) - y_reshape = y.reshape((0, 0, 223, 220)) - out = self.conv1(y_reshape) - return out - x = mx.nd.random.uniform(shape=(32, 3, 224, 224)) - net = Net() - check_layer_forward_withinput(net, x) - - -@with_seed() -def test_slice_conv(): - class Net(gluon.HybridBlock): - def __init__(self, **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - self.conv0 = nn.Conv2D(16, (3, 3)) - - def hybrid_forward(self, F, x): - x_slice = x.slice(begin=(0, 2, 0, 0), end=(4, 5, 32, 32)) - out = self.conv0(x_slice) - return out - x = mx.nd.random.uniform(shape=(8, 6, 32, 32)) - net = Net() - check_layer_forward_withinput(net, x) - - -@with_seed() -def test_slice_conv_slice_conv(): - class Net(gluon.HybridBlock): - def __init__(self, **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - self.conv0 = nn.Conv2D(16, (1, 1)) - self.conv1 = nn.Conv2D(16, (1, 1)) - - def hybrid_forward(self, F, x): - x_slice = x.slice(begin=(0, 0, 0, 0), end=(4, 3, 16, 16)) - y = self.conv0(x_slice) - y_slice = y.slice(begin=(0, 1, 0, 0), end=(4, 4, 16, 16)) - out = self.conv1(y_slice) - return out - x = mx.nd.random.uniform(shape=(8, 6, 32, 32)) - net = Net() - check_layer_forward_withinput(net, x) - - -@with_seed() -@unittest.skip('skippping temporarily, tracked by https://github.com/apache/incubator-mxnet/issues/11164') -def test_slice_conv_reshape_conv(): - class Net(gluon.HybridBlock): - def __init__(self, **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - self.conv0 = nn.Conv2D(64, (3, 3)) - self.conv1 = nn.Conv2D(256, (3, 3)) - - def hybrid_forward(self, F, x): - x_slice = x.slice(begin=(0, 0, 1, 1), end=(32, 3, 225, 225)) - y = self.conv0(x_slice) - y_reshape = y.reshape((0, 0, 444, 111)) - out = self.conv1(y_reshape) - return out - - x = mx.nd.random.uniform(shape=(32, 3, 299, 299)) - net = Net() - check_layer_forward_withinput(net, x) - -@with_seed() -def test_reshape_conv_slice_conv(): - """ - This test will test gluon Conv2d computation with ndarray reshape and slice - """ - class Net(gluon.HybridBlock): - def __init__(self, **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - self.conv0 = nn.Conv2D(16, (1, 1)) - self.conv1 = nn.Conv2D(16, (1, 1)) - - def hybrid_forward(self, F, x): - x_reshape = x.reshape((0, 0, 64, 16)) - y = self.conv0(x_reshape) - y_slice = y.slice(begin=(0, 0, 0, 0), end=(4, 3, 16, 16)) - out = self.conv1(y_slice) - return out - x = mx.nd.random.uniform(shape=(8, 3, 32, 32)) - net = Net() - check_layer_forward_withinput(net, x) - - -@with_seed() -def test_reshape_dense(): - class Net(gluon.HybridBlock): - def __init__(self, **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - channel0 = np.random.randint(1, 17) - self.dense0 = nn.Dense(channel0) - - def hybrid_forward(self, F, x): - x_reshape = x.reshape((8, 64, 128, -1)) - out = self.dense0(x_reshape) - return out - - x = mx.nd.random.uniform(shape=(16, 32, 64, 64)) - net = Net() - check_layer_forward_withinput(net, x) - - -@with_seed() -def test_slice_dense(): - class Net(gluon.HybridBlock): - def __init__(self, slice, 
**kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - channel0 = np.random.randint(1, 17) - self.dense0 = nn.Dense(channel0) - self.slice = slice - - def hybrid_forward(self, F, x): - x_slice = x.slice(begin=tuple(self.slice[0]), - end=tuple(self.slice[1])) - out = self.dense0(x_slice) - return out - - x = mx.nd.random.uniform(shape=(16, 32, 64, 64)) - slice = [[0, 16, 50, 0], [8, 32, 64, 64]] - net = Net(slice) - check_layer_forward_withinput(net, x) - -@with_seed() -def test_slice_dense_slice_dense(): - class Net(gluon.HybridBlock): - def __init__(self, slice, **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - channel0 = 50 - channel1 = np.random.randint(1, 33) - self.dense0 = nn.Dense(channel0) - self.dense1 = nn.Dense(channel1) - self.slice = slice - - def hybrid_forward(self, F, x): - x_slice = x.slice(begin=tuple(self.slice[0]), end=tuple(self.slice[1])) - y = self.dense0(x_slice) - y_slice = y.slice(begin=(4, 0), end=(-1, 10)) - out = self.dense1(y_slice) - return out - - x = mx.nd.random.uniform(shape=(16, 32, 64, 64)) - slice = [[0, 16, 50, 0], [8, 32, 64, 64]] - net = Net(slice) - check_layer_forward_withinput(net, x) - -@with_seed() -def test_reshape_dense_reshape_dense(): - class Net(gluon.HybridBlock): - def __init__(self, **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - channel0 = np.random.randint(1, 17) - channel1 = np.random.randint(1, 65) - self.dense0 = nn.Dense(channel0) - self.dense1 = nn.Dense(channel1) - - def hybrid_forward(self, F, x): - x_reshape = x.reshape((8, 64, 128, -1)) - y = self.dense0(x_reshape) - y_reshape = y.reshape((1, -1)) - out = self.dense1(y_reshape) - return out - - x = mx.nd.random.uniform(shape=(16, 32, 64, 64)) - net = Net() - check_layer_forward_withinput(net, x) - - -@with_seed() -def test_slice_dense_reshape_dense(): - class Net(gluon.HybridBlock): - def __init__(self, slice, **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - channel0 = np.random.randint(1, 17) - channel1 = np.random.randint(1, 17) - self.dense0 = nn.Dense(channel0) - self.dense1 = nn.Dense(channel1) - self.slice = slice - - def hybrid_forward(self, F, x): - x_slice = x.slice(begin=tuple(self.slice[0]), end=tuple(self.slice[1])) - y = self.dense0(x_slice) - y_reshape = y.reshape((1, -1)) - out = self.dense1(y_reshape) - return out - - x = mx.nd.random.uniform(shape=(16, 32, 64, 64)) - slice = [[0, 16, 50, 0], [8, 32, 64, 64]] - net = Net(slice) - check_layer_forward_withinput(net, x) - - -@with_seed() - -def test_reshape_dense_slice_dense(): - class Net(gluon.HybridBlock): - def __init__(self, **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - channel0 = 64 - channel1 = np.random.randint(1, 17) - self.dense0 = nn.Dense(channel0) - self.dense1 = nn.Dense(channel1) - - def hybrid_forward(self, F, x): - x_reshape = x.reshape((8, 64, 128, -1)) - y = self.dense0(x_reshape) - y_slice = y.slice(begin=(0, 32), end=(8, 64)) - out = self.dense1(y_slice) - return out - - x = mx.nd.random.uniform(shape=(16, 32, 64, 64)) - net = Net() - check_layer_forward_withinput(net, x) - - -@with_seed() -@unittest.skip('skippping temporarily, tracked by https://github.com/apache/incubator-mxnet/issues/11164') -def test_reshape_batchnorm(): - class Net(gluon.HybridBlock): - def __init__(self, shape, **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - self.conv0 = nn.Conv2D(128, (1, 1)) - self.bn0 = nn.BatchNorm() - self.reshape = shape - - def 
hybrid_forward(self, F, x): - x_in = self.conv0(x) - x_reshape = x_in.reshape(self.reshape) - out = self.bn0(x_reshape) - return out - - x = mx.nd.random.uniform(shape=(16, 128, 256, 256)) - shape = (32, 512, 128, -1) - net = Net(shape) - check_layer_forward_withinput(net, x) - - -@with_seed() -def test_slice_batchnorm(): - class Net(gluon.HybridBlock): - def __init__(self, slice, **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - self.conv0 = nn.Conv2D(128, (1, 1)) - self.bn0 = nn.BatchNorm(3) - self.slice = slice - - def hybrid_forward(self, F, x): - x_in = self.conv0(x) - x_slice = x_in.slice(begin=tuple(self.slice[0]), - end=tuple(self.slice[1])) - out = self.bn0(x_slice) - return out - - x = mx.nd.random.uniform(shape=(16, 128, 256, 256)) - slice = [[0, 64, 50, 0], [8, 128, 256, 256]] - net = Net(slice) - check_layer_forward_withinput(net, x) - - -@with_seed() -def test_slice_batchnorm_slice_batchnorm(): - class Net(gluon.HybridBlock): - def __init__(self, slice, **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - self.conv0 = nn.Conv2D(128, (1, 1)) - self.bn0 = nn.BatchNorm(3) - self.bn1 = nn.BatchNorm(1) - self.slice = slice - - def hybrid_forward(self, F, x): - x_in = self.conv0(x) - x_slice = x_in.slice(begin=tuple(self.slice[0][0]), end=tuple(self.slice[0][1])) - y = self.bn0(x_slice) - y_slice = y.slice(begin=tuple(self.slice[1][0]), end=tuple(self.slice[1][1])) - out = self.bn1(y_slice) - return out - - x = mx.nd.random.uniform(shape=(16, 128, 256, 256)) - slice = [[[0, 64, 50, 0], [8, 128, 200, 256]], [[4, 50, 0, 128], [7, -1, -1, -1]]] - net = Net(slice) - check_layer_forward_withinput(net, x) - - -@with_seed() -def test_reshape_batchnorm_reshape_batchnorm(): - class Net(gluon.HybridBlock): - def __init__(self, shape, **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - self.conv0 = nn.Conv2D(128, (1, 1)) - self.bn0 = nn.BatchNorm(0) - self.bn1 = nn.BatchNorm(2) - self.reshape = shape - - def hybrid_forward(self, F, x): - x_in = self.conv0(x) - x_reshape = x_in.reshape(self.reshape[0]) - y = self.bn0(x_reshape) - y_reshape = y.reshape(self.reshape[1]) - out = self.bn1(y_reshape) - return out - - x = mx.nd.random.uniform(shape=(16, 128, 256, 512)) - shape = [(8, 256, 128, -1), (32, 128, 512, -1)] - net = Net(shape) - check_layer_forward_withinput(net, x) - - -@with_seed() -def test_slice_batchnorm_reshape_batchnorm(): - class Net(gluon.HybridBlock): - def __init__(self, shape, slice, **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - self.conv0 = nn.Conv2D(128, (1, 1)) - self.bn0 = nn.BatchNorm(0) - self.bn1 = nn.BatchNorm(2) - self.reshape = shape - self.slice = slice - - def hybrid_forward(self, F, x): - x_in = self.conv0(x) - x_slice = x_in.slice(begin=tuple(self.slice[0]), end=tuple(self.slice[1])) - y = self.bn0(x_slice) - y_reshape = y.reshape(self.reshape) - out = self.bn1(y_reshape) - return out - - x = mx.nd.random.uniform(shape=(16, 128, 256, 256)) - slice = [[0, 64, 50, 0], [8, 128, 200, 256]] - shape = (1, 128, 256, -1) - net = Net(shape, slice) - check_layer_forward_withinput(net, x) - - -@with_seed() -def test_reshape_batchnorm_slice_batchnorm(): - class Net(gluon.HybridBlock): - def __init__(self, shape, slice, **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - self.conv0 = nn.Conv2D(128, (1, 1)) - self.bn0 = nn.BatchNorm(2) - self.bn1 = nn.BatchNorm(0) - self.reshape = shape - self.slice = slice - - def hybrid_forward(self, F, x): - 
x_in = self.conv0(x) - x_reshape = x_in.reshape(self.reshape) - y = self.bn0(x_reshape) - y_slice = y.slice(begin=tuple(self.slice[0]), end=tuple(self.slice[1])) - out = self.bn1(y_slice) - return out - - x = mx.nd.random.uniform(shape=(16, 128, 256, 256)) - slice = [[0, 0, 50, 0], [8, 1, -1, 100]] - shape = (128, 1, 256, -1) - net = Net(shape, slice) - check_layer_forward_withinput(net, x) - -@with_seed() -def test_reshape_pooling2d(): - max_pooling = nn.MaxPool2D(strides=(2, 3), padding=(1, 1)) - avg_pooling = nn.AvgPool2D(strides=(2, 2), padding=(1, 1)) - global_maxpooling = nn.GlobalMaxPool2D() - global_avgpooling = nn.GlobalAvgPool2D() - pooling_layers = [max_pooling, avg_pooling, global_maxpooling, global_avgpooling] - class Net(gluon.HybridBlock): - def __init__(self, - shape, - pooling_layer, - **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - self.reshape = shape - self.pool0 = pooling_layer - - def hybrid_forward(self, F, x): - x_reshape = x.reshape(self.reshape) - out = self.pool0(x_reshape) - return out - - x = mx.nd.random.uniform(shape=(16, 128, 256, 256)) - shape = (128, 256, 256, -1) - for i in range(len(pooling_layers)): - net = Net(shape, pooling_layers[i]) - check_layer_forward_withinput(net, x) - -@with_seed() -def test_slice_pooling2d(): - max_pooling = nn.MaxPool2D(strides=(2, 3), padding=(1, 1)) - avg_pooling = nn.AvgPool2D(strides=(2, 2), padding=(1, 1)) - global_maxpooling = nn.GlobalMaxPool2D() - global_avgpooling = nn.GlobalAvgPool2D() - pooling_layers = [max_pooling, avg_pooling, global_maxpooling, global_avgpooling] - class Net(gluon.HybridBlock): - def __init__(self, - slice, - pooling_layer, - **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - self.slice = slice - self.pool0 = pooling_layer - - def hybrid_forward(self, F, x): - x_slice = x.slice(begin=self.slice[0], end=self.slice[1]) - out = self.pool0(x_slice) - return out - - x = mx.nd.random.uniform(shape=(16, 128, 256, 256)) - slice = [(12, 0, 128, 64), (16, 16, 256, 256)] - for i in range(len(pooling_layers)): - net = Net(slice, pooling_layers[i]) - check_layer_forward_withinput(net, x) - -@with_seed() -def test_reshape_pooling2d_reshape_pooling2d(): - max_pooling = nn.MaxPool2D(strides=(2, 2), padding=(1, 1)) - avg_pooling = nn.AvgPool2D(strides=(2, 2), padding=(1, 1)) - global_maxpooling = nn.GlobalMaxPool2D() - global_avgpooling = nn.GlobalAvgPool2D() - pooling_layers = [max_pooling, avg_pooling, global_maxpooling, global_avgpooling] - class Net(gluon.HybridBlock): - def __init__(self, - shape, - pooling_layer1, - pooling_layer2, - **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - self.reshape = shape - self.pool0 = pooling_layer1 - self.pool1 = pooling_layer2 - - def hybrid_forward(self, F, x): - x_reshape = x.reshape(self.reshape[0]) - y = self.pool0(x_reshape) - y_reshape = y.reshape(self.reshape[1]) - out = self.pool1(y_reshape) - return out - - x = mx.nd.random.uniform(shape=(16, 128, 256, 256)) - shape = [(128, 256, 64, -1), (128, 256, 11, -1)] - for i in range(len(pooling_layers)): - for j in range(len(pooling_layers)): - if isinstance(pooling_layers[i], (nn.GlobalMaxPool2D, nn.GlobalAvgPool2D)): - shape[1] = (256, 128, 1, 1) - net = Net(shape, pooling_layers[i], pooling_layers[j]) - check_layer_forward_withinput(net, x) - -@with_seed() -def test_slice_pooling2d_slice_pooling2d(): - max_pooling = nn.MaxPool2D(strides=(2, 3), padding=(1, 1)) - avg_pooling = nn.AvgPool2D(strides=(2, 2), padding=(1, 1)) - 
global_maxpooling = nn.GlobalMaxPool2D() - global_avgpooling = nn.GlobalAvgPool2D() - pooling_layers = [max_pooling, avg_pooling, global_maxpooling, global_avgpooling] - class Net(gluon.HybridBlock): - def __init__(self, - slice, - pooling_layer1, - pooling_layer2, - **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - self.slice = slice - self.pool0 = pooling_layer1 - self.pool1 = pooling_layer2 - - def hybrid_forward(self, F, x): - x_slice = x.slice(begin=self.slice[0][0], end=self.slice[0][1]) - y = self.pool0(x_slice) - y_slice = y.slice(begin=self.slice[1][0], end=self.slice[1][1]) - out = self.pool1(y_slice) - return out - - x = mx.nd.random.uniform(shape=(16, 128, 256, 256)) - slice = [[(8, 0, 100, 50), (16, -1, -1, -1)], [(0, 64, 0, 50), (2, -1, -1, -1)]] - for i in range(len(pooling_layers)): - for j in range(len(pooling_layers)): - if isinstance(pooling_layers[i], (nn.GlobalMaxPool2D, nn.GlobalAvgPool2D)): - slice[1] = [(0, 64, 0, 0), (2, -1, 1, 1)] - net = Net(slice, pooling_layers[i], pooling_layers[j]) - check_layer_forward_withinput(net, x) - -@with_seed() -def test_slice_pooling2d_reshape_pooling2d(): - max_pooling = nn.MaxPool2D(strides=(2, 3), padding=(1, 1)) - avg_pooling = nn.AvgPool2D(strides=(2, 2), padding=(1, 1)) - global_maxpooling = nn.GlobalMaxPool2D() - global_avgpooling = nn.GlobalAvgPool2D() - pooling_layers = [max_pooling, avg_pooling, global_maxpooling, global_avgpooling] - class Net(gluon.HybridBlock): - def __init__(self, - shape, - slice, - pooling_layer1, - pooling_layer2, - **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - self.reshape = shape - self.slice = slice - self.pool0 = pooling_layer1 - self.pool1 = pooling_layer2 - - def hybrid_forward(self, F, x): - x_slice = x.slice(begin=self.slice[0], end=self.slice[1]) - y = self.pool0(x_slice) - y_reshape = y.reshape(self.reshape) - out = self.pool1(y_reshape) - return out - - x = mx.nd.random.uniform(shape=(16, 128, 256, 256)) - slice = [(8, 0, 100, 50), (16, 128, 256, 256)] - shape = (32, -1, 0, 0) - for i in range(len(pooling_layers)): - for j in range(len(pooling_layers)): - net = Net(shape, slice, pooling_layers[i], pooling_layers[j]) - check_layer_forward_withinput(net, x) - -@with_seed() -def test_reshape_pooling2d_slice_pooling2d(): - max_pooling = nn.MaxPool2D(strides=(2, 3), padding=(1, 1)) - avg_pooling = nn.AvgPool2D(strides=(2, 2), padding=(1, 1)) - global_maxpooling = nn.GlobalMaxPool2D() - global_avgpooling = nn.GlobalAvgPool2D() - pooling_layers = [max_pooling, avg_pooling, global_maxpooling, global_avgpooling] - class Net(gluon.HybridBlock): - def __init__(self, - shape, - slice, - pooling_layer1, - pooling_layer2, - **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - self.reshape = shape - self.slice = slice - self.pool0 = pooling_layer1 - self.pool1 = pooling_layer2 - - def hybrid_forward(self, F, x): - x_reshape = x.reshape(self.reshape) - y = self.pool0(x_reshape) - y_slice = y.slice(begin=self.slice[0], end=self.slice[1]) - out = self.pool1(y_slice) - return out - - x = mx.nd.random.uniform(shape=(16, 128, 256, 256)) - shape = (0, 512, 64, -1) - slice = [(8, 256, 10, 20), (-1, -1, -1, 70)] - for i in range(len(pooling_layers)): - for j in range(len(pooling_layers)): - if isinstance(pooling_layers[i], (nn.GlobalMaxPool2D, nn.GlobalAvgPool2D)): - slice = [(8, 256, 0, 0), (-1, -1, 1, 1)] - net = Net(shape, slice, pooling_layers[i], pooling_layers[j]) - check_layer_forward_withinput(net, x) - -@with_seed() 
-@unittest.skip('skippping temporarily, tracked by https://github.com/apache/incubator-mxnet/issues/11164') -def test_reshape_deconv(): - class Net(gluon.HybridBlock): - def __init__(self, shape, **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - self.reshape = shape - self.conv0 = nn.Conv2DTranspose(64, (3, 3)) - - def hybrid_forward(self, F, x): - x_reshape = x.reshape(self.reshape) - out = self.conv0(x_reshape) - return out - x = mx.nd.random.uniform(shape=(64, 2, 256, 256)) - shape = (8, 16, 64, -1) - net = Net(shape) - check_layer_forward_withinput(net, x) - -@with_seed() -@unittest.skip('skippping temporarily, tracked by https://github.com/apache/incubator-mxnet/issues/11164') -def test_slice_deconv(): - class Net(gluon.HybridBlock): - def __init__(self, slice, **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - self.slice = slice - self.conv0 = nn.Conv2DTranspose(64, (3, 3)) - - def hybrid_forward(self, F, x): - x_slice = x.slice(begin=self.slice[0], end=self.slice[1]) - out = self.conv0(x_slice) - return out - x = mx.nd.random.uniform(shape=(128, 32, 500, 500)) - slice = [(0, 16, 0, 0), (1, 32, 256, 256)] - net = Net(slice) - check_layer_forward_withinput(net, x) - -@with_seed() -@unittest.skip('skippping temporarily, tracked by https://github.com/apache/incubator-mxnet/issues/11164') -def test_reshape_deconv_reshape_deconv(): - class Net(gluon.HybridBlock): - def __init__(self, shape, **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - self.reshape = shape - self.conv0 = nn.Conv2DTranspose(64, (3, 3)) - self.conv1 = nn.Conv2DTranspose(128, (2, 3), strides=(2, 2)) - - def hybrid_forward(self, F, x): - x_reshape = x.reshape(self.reshape[0]) - y = self.conv0(x_reshape) - y_reshape = y.reshape(self.reshape[1]) - out = self.conv1(y_reshape) - return out - x = mx.nd.random.uniform(shape=(16, 32, 256, 512)) - shape = [(32, 0, 256, -1), (64, 32, 129, -1)] - net = Net(shape) - check_layer_forward_withinput(net, x) - -@with_seed() -@unittest.skip('skippping temporarily, tracked by https://github.com/apache/incubator-mxnet/issues/11164') -def test_slice_deconv_slice_deconv(): - class Net(gluon.HybridBlock): - def __init__(self, slice, **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - self.slice = slice - self.conv0 = nn.Conv2DTranspose(64, (3, 3)) - self.conv1 = nn.Conv2DTranspose(128, (2, 3), strides=(2, 2)) - - def hybrid_forward(self, F, x): - x_slice = x.slice(begin=self.slice[0][0], end=self.slice[0][1]) - y = self.conv0(x_slice) - y_slice = y.slice(begin=self.slice[1][0], end=self.slice[1][1]) - out = self.conv1(y_slice) - return out - x = mx.nd.random.uniform(shape=(128, 32, 500, 500)) - slice = [[(0, 16, 0, 0), (8, 32, 128, 128)], [(4, 0, 2, 0), (8, 32, 130, 128)]] - net = Net(slice) - check_layer_forward_withinput(net, x) - -@with_seed() -@unittest.skip('skippping temporarily, tracked by https://github.com/apache/incubator-mxnet/issues/11164') -def test_reshape_deconv_slice_deconv(): - class Net(gluon.HybridBlock): - def __init__(self, shape, slice, **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - self.reshape = shape - self.slice = slice - self.conv0 = nn.Conv2DTranspose(64, (3, 3)) - self.conv1 = nn.Conv2DTranspose(128, (2, 3), strides=(2, 2)) - - def hybrid_forward(self, F, x): - x_reshape = x.reshape(self.reshape) - y = self.conv0(x_reshape) - y_slice = y.slice(begin=self.slice[0], end=self.slice[1]) - out = self.conv1(y_slice) - return out - x = 
mx.nd.random.uniform(shape=(16, 4, 500, 500)) - shape = (32, 16, 125, -1) - slice = [(4, 32, 0, 0), (20, 64, 64, 224)] - net = Net(shape, slice) - check_layer_forward_withinput(net, x) - -@with_seed() -@unittest.skip('skippping temporarily, tracked by https://github.com/apache/incubator-mxnet/issues/11164') -def test_slice_deconv_reshape_deconv(): - class Net(gluon.HybridBlock): - def __init__(self, shape, slice, **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - self.reshape = shape - self.slice = slice - self.conv0 = nn.Conv2DTranspose(64, (3, 3)) - self.conv1 = nn.Conv2DTranspose(128, (2, 3), strides=(2, 2)) - - def hybrid_forward(self, F, x): - x_slice = x.slice(begin=self.slice[0], end=self.slice[1]) - y = self.conv0(x_slice) - y_reshape = y.reshape(self.reshape) - out = self.conv1(y_reshape) - return out - x = mx.nd.random.uniform(shape=(16, 32, 256, 512)) - shape = (24, 16, 452, -1) - slice = [(4, 0, 0, 0), (16, 32, 224, 224)] - net = Net(shape, slice) - check_layer_forward_withinput(net, x) - -@with_seed() -def test_reshape_activation(): - class Net(gluon.HybridBlock): - def __init__(self, act, shape, **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - self.reshape = shape - self.act = nn.Activation(act) - - def hybrid_forward(self, F, x): - x_reshape = x.reshape(self.reshape) - out = self.act(x_reshape) - return out - acts = ["relu", "sigmoid", "tanh", "softrelu"] - for act in acts: - x = mx.nd.random.uniform(-1, 1, shape=(16, 32, 256, 512)) - shape = (64, 8, 128, -1) - net = Net(act, shape) - check_layer_forward_withinput(net, x) - - -@with_seed() -def test_slice_activation(): - class Net(gluon.HybridBlock): - def __init__(self, act, slice, **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - self.slice = slice - self.act = nn.Activation(act) - - def hybrid_forward(self, F, x): - x_slice = x.slice(begin=self.slice[0], end=self.slice[1]) - out = self.act(x_slice) - return out - - acts = ["relu", "sigmoid", "tanh", "softrelu"] - for act in acts: - x = mx.nd.random.uniform(-1, 1, shape=(16, 32, 256, 512)) - slice = [(8, 16, 0, 0), (16, 32, 100, 100)] - net = Net(act, slice) - check_layer_forward_withinput(net, x) - - -@with_seed() -def test_reshape_activation_reshape_activation(): - class Net(gluon.HybridBlock): - def __init__(self, act0, act1, shape, **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - self.reshape = shape - self.act0 = nn.Activation(act0) - self.act1 = nn.Activation(act1) - - def hybrid_forward(self, F, x): - x_reshape = x.reshape(self.reshape[0]) - y = self.act0(x_reshape) - y_reshape = y.reshape(self.reshape[1]) - out = self.act1(y_reshape) - return out - acts = ["relu", "sigmoid", "tanh", "softrelu"] - for idx0, act0 in enumerate(acts): - for idx1, act1 in enumerate(acts): - if idx1 == idx0: - continue - x = mx.nd.random.uniform(-1, 1, shape=(16, 32, 256, 512)) - shape = [(64, 8, 128, -1), (16, 64, 128, -1)] - net = Net(act0, act1, shape) - check_layer_forward_withinput(net, x) - - -@with_seed() -def test_slice_activation_slice_activation(): - class Net(gluon.HybridBlock): - def __init__(self, act0, act1, slice, **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - self.slice = slice - self.act0 = nn.Activation(act0) - self.act1 = nn.Activation(act1) - - def hybrid_forward(self, F, x): - x_slice = x.slice(begin=self.slice[0][0], end=self.slice[0][1]) - y = self.act0(x_slice) - y_slice = y.slice(begin=self.slice[1][0], end=self.slice[1][1]) - 
out = self.act1(y_slice) - return out - acts = ["relu", "sigmoid", "tanh", "softrelu"] - for idx0, act0 in enumerate(acts): - for idx1, act1 in enumerate(acts): - if idx1 == idx0: - continue - x = mx.nd.random.uniform(-1, 1, shape=(16, 32, 256, 512)) - slice = [[(0, 0, 100, 100), (8, 16, 256, 512)], [(2, 4, 0, 0), (8, 10, 128, 128)]] - net = Net(act0, act1, slice) - check_layer_forward_withinput(net, x) - - -@with_seed() -def test_reshape_activation_slice_activation(): - class Net(gluon.HybridBlock): - def __init__(self, act0, act1, shape, slice, **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - self.reshape = shape - self.slice = slice - self.act0 = nn.Activation(act0) - self.act1 = nn.Activation(act1) - - def hybrid_forward(self, F, x): - x_reshape = x.reshape(self.reshape) - y = self.act0(x_reshape) - y_slice = y.slice(begin=self.slice[0], end=self.slice[1]) - out = self.act1(y_slice) - return out - acts = ["relu", "sigmoid", "tanh", "softrelu"] - for idx0, act0 in enumerate(acts): - for idx1, act1 in enumerate(acts): - if idx1 == idx0: - continue - x = mx.nd.random.uniform(-1, 1, shape=(16, 32, 256, 512)) - shape = (64, 16, 128, -1) - slice = [(0, 0, 0, 100), (8, 16, 64, 228)] - net = Net(act0, act1, shape, slice) - check_layer_forward_withinput(net, x) - - -@with_seed() -def test_slice_activation_reshape_activation(): - class Net(gluon.HybridBlock): - def __init__(self, act0, act1, shape, slice, **kwargs): - super(Net, self).__init__(**kwargs) - with self.name_scope(): - self.reshape = shape - self.slice = slice - self.act0 = nn.Activation(act0) - self.act1 = nn.Activation(act1) - - def hybrid_forward(self, F, x): - x_slice = x.slice(begin=self.slice[0], end=self.slice[1]) - y = self.act0(x_slice) - y_reshape = y.reshape(self.reshape) - out = self.act1(y_reshape) - return out - acts = ["relu", "sigmoid", "tanh", "softrelu"] - for idx0, act0 in enumerate(acts): - for idx1, act1 in enumerate(acts): - if idx1 == idx0: - continue - x = mx.nd.random.uniform(-1, 1, shape=(16, 32, 256, 512)) - slice = [(0, 0, 0, 100), (8, 16, 64, 228)] - shape = (64, 16, 64, -1) - net = Net(act0, act1, shape, slice) - check_layer_forward_withinput(net, x) - -if __name__ == '__main__': - import nose - nose.runmodule() From 2d48d7d228f0ddce1b5d39de090fd663f51cf96d Mon Sep 17 00:00:00 2001 From: Shufan Date: Thu, 13 Sep 2018 15:54:19 +0800 Subject: [PATCH 12/12] Move the cases back to test_gluon.py --- tests/python/unittest/test_gluon.py | 1060 +++++++++++++++++++++++++++ 1 file changed, 1060 insertions(+) diff --git a/tests/python/unittest/test_gluon.py b/tests/python/unittest/test_gluon.py index 566b654cbf6d..72c672c8f8fc 100644 --- a/tests/python/unittest/test_gluon.py +++ b/tests/python/unittest/test_gluon.py @@ -1455,6 +1455,1066 @@ def hybrid_forward(self, F, array, index): row, _ = model(array, index) row.backward() + +def check_layer_forward_withinput(net, x): + x_hybrid = x.copy() + x.attach_grad() + x_hybrid.attach_grad() + net.collect_params().initialize() + with mx.autograd.record(): + out1 = net(x) + out1.backward() + net.hybridize() + with mx.autograd.record(): + out2 = net(x_hybrid) + out2.backward() + mx.test_utils.assert_almost_equal(x.grad.asnumpy(), x_hybrid.grad.asnumpy(), rtol=1e-5, atol=1e-6) + mx.test_utils.assert_almost_equal(out1.asnumpy(), out2.asnumpy(), rtol=1e-5, atol=1e-6) + +@with_seed() +def test_conv2d_16c(): + chn_list = [16, 256] + kernel_list = [1, 3] + kernel_list.append(224) + batch_size = 4 + class Net(gluon.HybridBlock): + def __init__(self, + 
chn_num,
+                     kernel,
+                     **kwargs):
+            super(Net, self).__init__(**kwargs)
+            with self.name_scope():
+                self.conv0 = gluon.nn.Conv2D(chn_num, (kernel, kernel))
+
+        def hybrid_forward(self, F, x):
+            out = self.conv0(x)
+            return out
+
+    x = mx.nd.random.uniform(-1.0, 1.0, shape=(batch_size, 3, 224, 224))
+    for i in range(len(chn_list)):
+        for j in range(len(kernel_list)):
+            net = Net(chn_list[i], kernel_list[j])
+            check_layer_forward_withinput(net, x)
+
+@with_seed()
+def test_group_conv2d_16c():
+    grp_list = [16]
+    input_size_list = np.random.randint(low=3, high=65, size=10).tolist()
+    kernel_list = [1, 3]
+    batch_size = 4
+    class Net(gluon.HybridBlock):
+        def __init__(self,
+                     chn_num,
+                     kernel,
+                     **kwargs):
+            super(Net, self).__init__(**kwargs)
+            with self.name_scope():
+                self.conv0 = gluon.nn.Conv2D(chn_num, (1, 1))
+                self.conv1 = gluon.nn.Conv2D(chn_num, (kernel, kernel), groups=chn_num)
+
+        def hybrid_forward(self, F, x):
+            y = self.conv0(x)
+            out = self.conv1(y)
+            return out
+
+    for i in range(len(input_size_list)):
+        x = mx.nd.random.uniform(-1.0, 1.0, shape=(batch_size, 3, input_size_list[i], input_size_list[i]))
+        for j in range(len(grp_list)):
+            for k in range(len(kernel_list)):
+                net = Net(grp_list[j], kernel_list[k])
+                check_layer_forward_withinput(net, x)
+
+@with_seed()
+@unittest.skip('skipping temporarily, tracked by https://github.com/apache/incubator-mxnet/issues/11164')
+def test_deconv2d_16c():
+    in_chn_list = [1024, 512, 256, 128, 64, 32, 16]
+    out_chn_list = [512, 256, 128, 64, 32, 16, 3]
+    kernel_list = [1, 3, 5, 7]
+    in_shape = [4, 8, 16, 32, 64, 224]
+    batch_size = 4
+    class Net(gluon.HybridBlock):
+        def __init__(self, chn_num, kernel, **kwargs):
+            super(Net, self).__init__(**kwargs)
+            with self.name_scope():
+                self.deconv0 = gluon.nn.Conv2DTranspose(chn_num, (kernel, kernel))
+
+        def hybrid_forward(self, F, x):
+            out = self.deconv0(x)
+            return out
+    for i in range(len(in_shape)):
+        x = mx.nd.random.uniform(-1.0, 1.0, shape=(batch_size, in_chn_list[i], in_shape[i], in_shape[i]))
+        for j in range(len(kernel_list)):
+            net = Net(out_chn_list[i], kernel_list[j])
+            check_layer_forward_withinput(net, x)
+
+
+@with_seed()
+@unittest.skip('skipping temporarily, tracked by https://github.com/apache/incubator-mxnet/issues/11164')
+def test_batchnorm_16c():
+    chn_list = [16, 1024]
+    shape = np.random.randint(low=1, high=300, size=10)
+    shape_list = []
+    for i in range(len(shape)):
+        shape_list.append((shape[i], shape[i]))
+    batch_size = 4
+    class Net(gluon.HybridBlock):
+        def __init__(self,
+                     chn_num,
+                     kernel,
+                     axis,
+                     **kwargs):
+            super(Net, self).__init__(**kwargs)
+            with self.name_scope():
+                self.conv0 = gluon.nn.Conv2D(chn_num, (kernel, kernel))
+                self.bn0 = gluon.nn.BatchNorm(axis=axis)
+
+        def hybrid_forward(self, F, x):
+            conv = self.conv0(x)
+            out = self.bn0(conv)
+            return out
+
+    for i in range(len(chn_list)):
+        for j in range(len(shape_list)):
+            shape = (batch_size, ) + (3,) + shape_list[j]
+            x = mx.nd.random.uniform(-1.0, 1.0, shape=shape)
+            net = Net(chn_list[i], 1, 1)
+            check_layer_forward_withinput(net, x)
+
+
+@with_seed()
+def test_concat():
+    chn_list = [16, 64]
+    shapes = [1, 3, 5]
+    input_num = np.random.randint(low=2, high=11)
+    shape_list = []
+    for i in range(len(shapes)):
+        shape_list.append((shapes[i], shapes[i]))
+    batch_size = 4
+    class Net(gluon.HybridBlock):
+        def __init__(self,
+                     check_dim,
+                     input_num,
+                     chn_num,
+                     kernel,
+                     **kwargs):
+            super(Net, self).__init__(**kwargs)
+            with self.name_scope():
+                from mxnet.gluon.contrib.nn import HybridConcurrent
+                self.concat = HybridConcurrent(axis=check_dim)
+                for i in range(input_num):
+                    self.concat.add(gluon.nn.Conv2D(chn_num, (kernel, kernel)))
+
+        def hybrid_forward(self, F, x):
+            return self.concat(x)
+
+    for s in range(len(shape_list)):
+        shape = (batch_size,) + (3,) + shape_list[s]
+        x = mx.nd.random.uniform(-1.0, 1.0, shape=shape)
+        for i in range(len(chn_list)):
+            for axis in range(4):
+                net = Net(axis, input_num, chn_list[i], 1)
+                check_layer_forward_withinput(net, x)
+
+@with_seed()
+def test_reshape_conv():
+    class Net(gluon.HybridBlock):
+        def __init__(self, **kwargs):
+            super(Net, self).__init__(**kwargs)
+            with self.name_scope():
+                self.conv0 = nn.Conv2D(64, (3, 3))
+
+        def hybrid_forward(self, F, x):
+            x_reshape = x.reshape((0, 0, 128, 32))
+            out = self.conv0(x_reshape)
+            return out
+    x = mx.nd.random.uniform(shape=(4, 3, 64, 64))
+    net = Net()
+    check_layer_forward_withinput(net, x)
+
+
+@with_seed()
+@unittest.skip('skipping temporarily, tracked by https://github.com/apache/incubator-mxnet/issues/11164')
+def test_reshape_conv_reshape_conv():
+    class Net(gluon.HybridBlock):
+        def __init__(self, **kwargs):
+            super(Net, self).__init__(**kwargs)
+            with self.name_scope():
+                self.conv0 = nn.Conv2D(64, (3, 3))
+                self.conv1 = nn.Conv2D(128, (3, 3))
+
+        def hybrid_forward(self, F, x):
+            x_reshape = x.reshape((0, 0, 128, 32))
+            y = self.conv0(x_reshape)
+            "spatial shape of y is (62, 62)"
+            y_reshape = y.reshape((0, 0, 124, 31))
+            out = self.conv1(y_reshape)
+            return out
+    x = mx.nd.random.uniform(shape=(4, 3, 64, 64))
+    net = Net()
+    check_layer_forward_withinput(net, x)
+
+@with_seed()
+def test_slice_conv():
+    class Net(gluon.HybridBlock):
+        def __init__(self, **kwargs):
+            super(Net, self).__init__(**kwargs)
+            with self.name_scope():
+                self.conv0 = nn.Conv2D(16, (3, 3))
+
+        def hybrid_forward(self, F, x):
+            x_slice = x.slice(begin=(0, 2, 0, 0), end=(4, 5, 32, 32))
+            out = self.conv0(x_slice)
+            return out
+    x = mx.nd.random.uniform(shape=(8, 6, 32, 32))
+    net = Net()
+    check_layer_forward_withinput(net, x)
+
+
+@with_seed()
+def test_slice_conv_slice_conv():
+    class Net(gluon.HybridBlock):
+        def __init__(self, **kwargs):
+            super(Net, self).__init__(**kwargs)
+            with self.name_scope():
+                self.conv0 = nn.Conv2D(32, (3, 3))
+                self.conv1 = nn.Conv2D(16, (1, 1))
+
+        def hybrid_forward(self, F, x):
+            x_slice = x.slice(begin=(0, 0, 0, 0), end=(4, 16, 16, 16))
+            y = self.conv0(x_slice)
+            "shape of y is (4, 32, 14, 14)"
+            y_slice = y.slice(begin=(0, 0, 0, 0), end=(4, 16, 3, 3))
+            out = self.conv1(y_slice)
+            return out
+    x = mx.nd.random.uniform(shape=(4, 32, 32, 32))
+    net = Net()
+    check_layer_forward_withinput(net, x)
+
+
+@with_seed()
+@unittest.skip('skipping temporarily, tracked by https://github.com/apache/incubator-mxnet/issues/11164')
+def test_slice_conv_reshape_conv():
+    class Net(gluon.HybridBlock):
+        def __init__(self, **kwargs):
+            super(Net, self).__init__(**kwargs)
+            with self.name_scope():
+                self.conv0 = nn.Conv2D(64, (3, 3))
+                self.conv1 = nn.Conv2D(128, (3, 3))
+
+        def hybrid_forward(self, F, x):
+            x_slice = x.slice(begin=(0, 0, 1, 1), end=(4, 16, 33, 33))
+            y = self.conv0(x_slice)
+            "shape of y is (4, 64, 30, 30)"
+            y_reshape = y.reshape((0, 0, 60, 15))
+            out = self.conv1(y_reshape)
+            return out
+
+    x = mx.nd.random.uniform(shape=(4, 32, 64, 64))
+    net = Net()
+    check_layer_forward_withinput(net, x)
+
+@with_seed()
+def test_reshape_conv_slice_conv():
+    """
+    This test will test gluon Conv2d computation with ndarray reshape and slice
+    """
+    class
Net(gluon.HybridBlock): + def __init__(self, **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + self.conv0 = nn.Conv2D(16, (3, 3)) + self.conv1 = nn.Conv2D(32, (3, 3)) + + def hybrid_forward(self, F, x): + x_reshape = x.reshape((0, 0, 64, 16)) + y = self.conv0(x_reshape) + "shape of y is (4, 16, 62, 14)" + y_slice = y.slice(begin=(0, 0, 0, 0), end=(2, 16, 14, 14)) + out = self.conv1(y_slice) + return out + x = mx.nd.random.uniform(shape=(4, 3, 32, 32)) + net = Net() + check_layer_forward_withinput(net, x) + +@with_seed() +def test_reshape_dense(): + class Net(gluon.HybridBlock): + def __init__(self, **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + channel0 = np.random.randint(1, 17) + self.dense0 = nn.Dense(channel0) + + def hybrid_forward(self, F, x): + x_reshape = x.reshape((8, 64, 128, -1)) + out = self.dense0(x_reshape) + return out + + x = mx.nd.random.uniform(shape=(4, 32, 64, 64)) + net = Net() + check_layer_forward_withinput(net, x) + + +@with_seed() +def test_slice_dense(): + class Net(gluon.HybridBlock): + def __init__(self, slice, **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + channel0 = np.random.randint(1, 17) + self.dense0 = nn.Dense(channel0) + self.slice = slice + + def hybrid_forward(self, F, x): + x_slice = x.slice(begin=tuple(self.slice[0]), + end=tuple(self.slice[1])) + out = self.dense0(x_slice) + return out + + x = mx.nd.random.uniform(shape=(16, 32, 64, 64)) + slice = [[0, 16, 0, 0], [4, 32, 32, 32]] + net = Net(slice) + check_layer_forward_withinput(net, x) + +@with_seed() +def test_slice_dense_slice_dense(): + class Net(gluon.HybridBlock): + def __init__(self, slice, **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + channel0 = 32 + channel1 = np.random.randint(1, 17) + self.dense0 = nn.Dense(channel0) + self.dense1 = nn.Dense(channel1) + self.slice = slice + + def hybrid_forward(self, F, x): + x_slice = x.slice(begin=tuple(self.slice[0]), end=tuple(self.slice[1])) + y = self.dense0(x_slice) + y_slice = y.slice(begin=(1, 0), end=(3, 10)) + out = self.dense1(y_slice) + return out + + x = mx.nd.random.uniform(shape=(16, 32, 64, 64)) + slice = [[0, 16, 0, 0], [4, 32, 32, 32]] + net = Net(slice) + check_layer_forward_withinput(net, x) + +@with_seed() +def test_reshape_dense_reshape_dense(): + class Net(gluon.HybridBlock): + def __init__(self, **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + channel0 = np.random.randint(1, 17) + channel1 = np.random.randint(1, 33) + self.dense0 = nn.Dense(channel0) + self.dense1 = nn.Dense(channel1) + + def hybrid_forward(self, F, x): + x_reshape = x.reshape((4, 16, 128, 32)) + y = self.dense0(x_reshape) + y_reshape = y.reshape((1, -1)) + out = self.dense1(y_reshape) + return out + + x = mx.nd.random.uniform(shape=(4, 16, 64, 64)) + net = Net() + check_layer_forward_withinput(net, x) + + +@with_seed() +def test_slice_dense_reshape_dense(): + class Net(gluon.HybridBlock): + def __init__(self, slice, **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + channel0 = np.random.randint(1, 17) + channel1 = np.random.randint(1, 17) + self.dense0 = nn.Dense(channel0) + self.dense1 = nn.Dense(channel1) + self.slice = slice + + def hybrid_forward(self, F, x): + x_slice = x.slice(begin=tuple(self.slice[0]), end=tuple(self.slice[1])) + y = self.dense0(x_slice) + y_reshape = y.reshape((1, -1)) + out = self.dense1(y_reshape) + return out + + x = mx.nd.random.uniform(shape=(16, 32, 64, 64)) + 
slice = [[0, 16, 0, 0], [4, 32, 32, 32]] + net = Net(slice) + check_layer_forward_withinput(net, x) + + +@with_seed() +def test_reshape_dense_slice_dense(): + class Net(gluon.HybridBlock): + def __init__(self, **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + channel0 = 64 + channel1 = np.random.randint(1, 17) + self.dense0 = nn.Dense(channel0) + self.dense1 = nn.Dense(channel1) + + def hybrid_forward(self, F, x): + x_reshape = x.reshape((4, 16, 128, 32)) + y = self.dense0(x_reshape) + y_slice = y.slice(begin=(1, 32), end=(3, 64)) + out = self.dense1(y_slice) + return out + + x = mx.nd.random.uniform(shape=(4, 16, 64, 64)) + net = Net() + check_layer_forward_withinput(net, x) + + +@with_seed() +@unittest.skip('skippping temporarily, tracked by https://github.com/apache/incubator-mxnet/issues/11164') +def test_reshape_batchnorm(): + class Net(gluon.HybridBlock): + def __init__(self, shape, **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + self.conv0 = nn.Conv2D(96, (1, 1)) + self.bn0 = nn.BatchNorm() + self.reshape = shape + + def hybrid_forward(self, F, x): + x_in = self.conv0(x) + x_reshape = x_in.reshape(self.reshape) + out = self.bn0(x_reshape) + return out + + x = mx.nd.random.uniform(shape=(4, 32, 64, 64)) + shape = (4, 64, 64, -1) + net = Net(shape) + check_layer_forward_withinput(net, x) + + +@with_seed() +def test_slice_batchnorm(): + class Net(gluon.HybridBlock): + def __init__(self, slice, **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + self.conv0 = nn.Conv2D(128, (1, 1)) + self.bn0 = nn.BatchNorm() + self.slice = slice + + def hybrid_forward(self, F, x): + x_in = self.conv0(x) + x_slice = x_in.slice(begin=tuple(self.slice[0]), + end=tuple(self.slice[1])) + out = self.bn0(x_slice) + return out + + x = mx.nd.random.uniform(shape=(16, 128, 256, 256)) + slice = [[0, 0, 0, 0], [4, 32, 32, 32]] + net = Net(slice) + check_layer_forward_withinput(net, x) + + +@with_seed() +@unittest.skip('skippping temporarily, tracked by https://github.com/apache/incubator-mxnet/issues/11164') +def test_slice_batchnorm_slice_batchnorm(): + class Net(gluon.HybridBlock): + def __init__(self, slice, **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + self.conv0 = nn.Conv2D(128, (1, 1)) + self.bn0 = nn.BatchNorm() + self.bn1 = nn.BatchNorm() + self.slice = slice + + def hybrid_forward(self, F, x): + x_in = self.conv0(x) + x_slice = x_in.slice(begin=tuple(self.slice[0][0]), end=tuple(self.slice[0][1])) + y = self.bn0(x_slice) + y_slice = y.slice(begin=tuple(self.slice[1][0]), end=tuple(self.slice[1][1])) + out = self.bn1(y_slice) + return out + + x = mx.nd.random.uniform(shape=(16, 128, 256, 256)) + slice = [[[0, 0, 0, 0], [4, 32, 32, 32]], [[0, 0, 0, 0], [2, 64, 16, 16]]] + net = Net(slice) + check_layer_forward_withinput(net, x) + + +@with_seed() +@unittest.skip('skippping temporarily, tracked by https://github.com/apache/incubator-mxnet/issues/11164') +def test_reshape_batchnorm_reshape_batchnorm(): + class Net(gluon.HybridBlock): + def __init__(self, shape, **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + self.conv0 = nn.Conv2D(128, (1, 1)) + self.bn0 = nn.BatchNorm() + self.bn1 = nn.BatchNorm() + self.reshape = shape + + def hybrid_forward(self, F, x): + x_in = self.conv0(x) + x_reshape = x_in.reshape(self.reshape[0]) + y = self.bn0(x_reshape) + y_reshape = y.reshape(self.reshape[1]) + out = self.bn1(y_reshape) + return out + + x = mx.nd.random.uniform(shape=(4, 32, 
64, 64)) + shape = [(4, 64, 64, -1), (4, 128, -1, 32)] + net = Net(shape) + check_layer_forward_withinput(net, x) + + +@with_seed() +def test_slice_batchnorm_reshape_batchnorm(): + class Net(gluon.HybridBlock): + def __init__(self, shape, slice, **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + self.conv0 = nn.Conv2D(128, (1, 1)) + self.bn0 = nn.BatchNorm() + self.bn1 = nn.BatchNorm() + self.reshape = shape + self.slice = slice + + def hybrid_forward(self, F, x): + x_in = self.conv0(x) + x_slice = x_in.slice(begin=tuple(self.slice[0]), end=tuple(self.slice[1])) + y = self.bn0(x_slice) + y_reshape = y.reshape(self.reshape) + out = self.bn1(y_reshape) + return out + + x = mx.nd.random.uniform(shape=(16, 128, 256, 256)) + slice = [[0, 0, 0, 0], [4, 32, 32, 32]] + shape = (1, 128, 64, -1) + net = Net(shape, slice) + check_layer_forward_withinput(net, x) + + +@with_seed() +@unittest.skip('skippping temporarily, tracked by https://github.com/apache/incubator-mxnet/issues/11164') +def test_reshape_batchnorm_slice_batchnorm(): + class Net(gluon.HybridBlock): + def __init__(self, shape, slice, **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + self.conv0 = nn.Conv2D(128, (1, 1)) + self.bn0 = nn.BatchNorm() + self.bn1 = nn.BatchNorm() + self.reshape = shape + self.slice = slice + + def hybrid_forward(self, F, x): + x_in = self.conv0(x) + x_reshape = x_in.reshape(self.reshape) + y = self.bn0(x_reshape) + y_slice = y.slice(begin=tuple(self.slice[0]), end=tuple(self.slice[1])) + out = self.bn1(y_slice) + return out + + x = mx.nd.random.uniform(shape=(4, 32, 64, 64)) + slice = [[0, 0, 0, 0], [2, 64, 32, 32]] + shape = (4, 64, 64, -1) + net = Net(shape, slice) + check_layer_forward_withinput(net, x) + +@with_seed() +@unittest.skip('skippping temporarily, tracked by https://github.com/apache/incubator-mxnet/issues/11164') +def test_reshape_pooling2d(): + max_pooling = nn.MaxPool2D(strides=(2, 3), padding=(1, 1)) + avg_pooling = nn.AvgPool2D(strides=(2, 2), padding=(1, 1)) + global_maxpooling = nn.GlobalMaxPool2D() + global_avgpooling = nn.GlobalAvgPool2D() + pooling_layers = [max_pooling, avg_pooling, global_maxpooling, global_avgpooling] + class Net(gluon.HybridBlock): + def __init__(self, + shape, + pooling_layer, + **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + self.reshape = shape + self.pool0 = pooling_layer + + def hybrid_forward(self, F, x): + x_reshape = x.reshape(self.reshape) + out = self.pool0(x_reshape) + return out + + x = mx.nd.random.uniform(shape=(4, 32, 32, 32)) + shape = (4, 64, 64, -1) + for i in range(len(pooling_layers)): + net = Net(shape, pooling_layers[i]) + check_layer_forward_withinput(net, x) + +@with_seed() +def test_slice_pooling2d(): + max_pooling = nn.MaxPool2D(strides=(2, 3), padding=(1, 1)) + avg_pooling = nn.AvgPool2D(strides=(2, 2), padding=(1, 1)) + global_maxpooling = nn.GlobalMaxPool2D() + global_avgpooling = nn.GlobalAvgPool2D() + pooling_layers = [max_pooling, avg_pooling, global_maxpooling, global_avgpooling] + class Net(gluon.HybridBlock): + def __init__(self, + slice, + pooling_layer, + **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + self.slice = slice + self.pool0 = pooling_layer + + def hybrid_forward(self, F, x): + x_slice = x.slice(begin=self.slice[0], end=self.slice[1]) + out = self.pool0(x_slice) + return out + + x = mx.nd.random.uniform(shape=(16, 128, 256, 256)) + slice = [(0, 0, 0, 0), (4, 16, 32, 64)] + for i in range(len(pooling_layers)): + 
net = Net(slice, pooling_layers[i]) + check_layer_forward_withinput(net, x) + +@with_seed() +@unittest.skip('skippping temporarily, tracked by https://github.com/apache/incubator-mxnet/issues/11164') +def test_reshape_pooling2d_reshape_pooling2d(): + max_pooling = nn.MaxPool2D(strides=(2, 2), padding=(1, 1)) + avg_pooling = nn.AvgPool2D(strides=(2, 2), padding=(1, 1)) + global_maxpooling = nn.GlobalMaxPool2D() + global_avgpooling = nn.GlobalAvgPool2D() + pooling_layers = [max_pooling, avg_pooling, global_maxpooling, global_avgpooling] + class Net(gluon.HybridBlock): + def __init__(self, + shape, + pooling_layer1, + pooling_layer2, + **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + self.reshape = shape + self.pool0 = pooling_layer1 + self.pool1 = pooling_layer2 + + def hybrid_forward(self, F, x): + x_reshape = x.reshape(self.reshape[0]) + y = self.pool0(x_reshape) + y_reshape = y.reshape(self.reshape[1]) + out = self.pool1(y_reshape) + return out + + x = mx.nd.random.uniform(shape=(16, 128, 256, 256)) + shape = [(128, 256, 64, -1), (128, 256, 11, -1)] + for i in range(len(pooling_layers)): + for j in range(len(pooling_layers)): + if isinstance(pooling_layers[i], (nn.GlobalMaxPool2D, nn.GlobalAvgPool2D)): + shape[1] = (256, 128, 1, 1) + net = Net(shape, pooling_layers[i], pooling_layers[j]) + check_layer_forward_withinput(net, x) + +@with_seed() +def test_slice_pooling2d_slice_pooling2d(): + max_pooling = nn.MaxPool2D(strides=(2, 3), padding=(1, 1)) + avg_pooling = nn.AvgPool2D(strides=(2, 2), padding=(1, 1)) + global_maxpooling = nn.GlobalMaxPool2D() + global_avgpooling = nn.GlobalAvgPool2D() + pooling_layers = [max_pooling, avg_pooling, global_maxpooling, global_avgpooling] + class Net(gluon.HybridBlock): + def __init__(self, + slice, + pooling_layer1, + pooling_layer2, + **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + self.slice = slice + self.pool0 = pooling_layer1 + self.pool1 = pooling_layer2 + + def hybrid_forward(self, F, x): + x_slice = x.slice(begin=self.slice[0][0], end=self.slice[0][1]) + y = self.pool0(x_slice) + y_slice = y.slice(begin=self.slice[1][0], end=self.slice[1][1]) + out = self.pool1(y_slice) + return out + + x = mx.nd.random.uniform(shape=(16, 128, 256, 256)) + slice = [[(8, 0, 100, 50), (16, -1, -1, -1)], [(0, 64, 0, 50), (2, -1, -1, -1)]] + for i in range(len(pooling_layers)): + for j in range(len(pooling_layers)): + if isinstance(pooling_layers[i], (nn.GlobalMaxPool2D, nn.GlobalAvgPool2D)): + slice[1] = [(0, 64, 0, 0), (2, -1, 1, 1)] + net = Net(slice, pooling_layers[i], pooling_layers[j]) + check_layer_forward_withinput(net, x) + +@with_seed() +@unittest.skip('skippping temporarily, tracked by https://github.com/apache/incubator-mxnet/issues/11164') +def test_slice_pooling2d_reshape_pooling2d(): + max_pooling = nn.MaxPool2D(strides=(2, 3), padding=(1, 1)) + avg_pooling = nn.AvgPool2D(strides=(2, 2), padding=(1, 1)) + global_maxpooling = nn.GlobalMaxPool2D() + global_avgpooling = nn.GlobalAvgPool2D() + pooling_layers = [max_pooling, avg_pooling, global_maxpooling, global_avgpooling] + class Net(gluon.HybridBlock): + def __init__(self, + shape, + slice, + pooling_layer1, + pooling_layer2, + **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + self.reshape = shape + self.slice = slice + self.pool0 = pooling_layer1 + self.pool1 = pooling_layer2 + + def hybrid_forward(self, F, x): + x_slice = x.slice(begin=self.slice[0], end=self.slice[1]) + y = self.pool0(x_slice) + y_reshape = 
+            out = self.pool1(y_reshape)
+            return out
+
+    x = mx.nd.random.uniform(shape=(16, 128, 256, 256))
+    slice = [(8, 0, 100, 50), (16, 128, 256, 256)]
+    shape = (32, -1, 0, 0)
+    for i in range(len(pooling_layers)):
+        for j in range(len(pooling_layers)):
+            net = Net(shape, slice, pooling_layers[i], pooling_layers[j])
+            check_layer_forward_withinput(net, x)
+
+
+@with_seed()
+@unittest.skip('skipping temporarily, tracked by https://github.com/apache/incubator-mxnet/issues/11164')
+def test_reshape_pooling2d_slice_pooling2d():
+    max_pooling = nn.MaxPool2D(strides=(2, 3), padding=(1, 1))
+    avg_pooling = nn.AvgPool2D(strides=(2, 2), padding=(1, 1))
+    global_maxpooling = nn.GlobalMaxPool2D()
+    global_avgpooling = nn.GlobalAvgPool2D()
+    pooling_layers = [max_pooling, avg_pooling, global_maxpooling, global_avgpooling]
+    class Net(gluon.HybridBlock):
+        def __init__(self,
+                     shape,
+                     slice,
+                     pooling_layer1,
+                     pooling_layer2,
+                     **kwargs):
+            super(Net, self).__init__(**kwargs)
+            with self.name_scope():
+                self.reshape = shape
+                self.slice = slice
+                self.pool0 = pooling_layer1
+                self.pool1 = pooling_layer2
+
+        def hybrid_forward(self, F, x):
+            x_reshape = x.reshape(self.reshape)
+            y = self.pool0(x_reshape)
+            y_slice = y.slice(begin=self.slice[0], end=self.slice[1])
+            out = self.pool1(y_slice)
+            return out
+
+    x = mx.nd.random.uniform(shape=(16, 128, 256, 256))
+    shape = (0, 512, 64, -1)
+    slice = [(8, 256, 10, 20), (-1, -1, -1, 70)]
+    for i in range(len(pooling_layers)):
+        for j in range(len(pooling_layers)):
+            if isinstance(pooling_layers[i], (nn.GlobalMaxPool2D, nn.GlobalAvgPool2D)):
+                slice = [(8, 256, 0, 0), (-1, -1, 1, 1)]
+            net = Net(shape, slice, pooling_layers[i], pooling_layers[j])
+            check_layer_forward_withinput(net, x)
+
+
+@with_seed()
+@unittest.skip('skipping temporarily, tracked by https://github.com/apache/incubator-mxnet/issues/11164')
+def test_reshape_deconv():
+    class Net(gluon.HybridBlock):
+        def __init__(self, shape, **kwargs):
+            super(Net, self).__init__(**kwargs)
+            with self.name_scope():
+                self.reshape = shape
+                self.conv0 = nn.Conv2DTranspose(64, (3, 3))
+
+        def hybrid_forward(self, F, x):
+            x_reshape = x.reshape(self.reshape)
+            out = self.conv0(x_reshape)
+            return out
+
+    x = mx.nd.random.uniform(shape=(4, 16, 32, 32))
+    shape = (4, 16, 64, -1)
+    net = Net(shape)
+    check_layer_forward_withinput(net, x)
+
+
+@with_seed()
+@unittest.skip('skipping temporarily, tracked by https://github.com/apache/incubator-mxnet/issues/11164')
+def test_slice_deconv():
+    class Net(gluon.HybridBlock):
+        def __init__(self, slice, **kwargs):
+            super(Net, self).__init__(**kwargs)
+            with self.name_scope():
+                self.slice = slice
+                self.conv0 = nn.Conv2DTranspose(64, (3, 3))
+
+        def hybrid_forward(self, F, x):
+            x_slice = x.slice(begin=self.slice[0], end=self.slice[1])
+            out = self.conv0(x_slice)
+            return out
+
+    x = mx.nd.random.uniform(shape=(8, 32, 64, 64))
+    slice = [(0, 16, 0, 0), (4, 32, 32, 32)]
+    net = Net(slice)
+    check_layer_forward_withinput(net, x)
+
+
+@with_seed()
+@unittest.skip('skipping temporarily, tracked by https://github.com/apache/incubator-mxnet/issues/11164')
+def test_reshape_deconv_reshape_deconv():
+    class Net(gluon.HybridBlock):
+        def __init__(self, shape, **kwargs):
+            super(Net, self).__init__(**kwargs)
+            with self.name_scope():
+                self.reshape = shape
+                self.conv0 = nn.Conv2DTranspose(32, (3, 3))
+                self.conv1 = nn.Conv2DTranspose(64, (3, 3), strides=(2, 2))
+
+        def hybrid_forward(self, F, x):
+            x_reshape = x.reshape(self.reshape[0])
+            y = self.conv0(x_reshape)
"shape of y is (4, 32, 66, 18)" + y_reshape = y.reshape(self.reshape[1]) + out = self.conv1(y_reshape) + return out + x = mx.nd.random.uniform(shape=(4, 16, 32, 32)) + shape = [(4, 16, 64, -1), (4, 32, 33, -1)] + net = Net(shape) + check_layer_forward_withinput(net, x) + +@with_seed() +@unittest.skip('skippping temporarily, tracked by https://github.com/apache/incubator-mxnet/issues/11164') +def test_slice_deconv_slice_deconv(): + class Net(gluon.HybridBlock): + def __init__(self, slice, **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + self.slice = slice + self.conv0 = nn.Conv2DTranspose(32, (3, 3)) + self.conv1 = nn.Conv2DTranspose(64, (3, 3), strides=(2, 2)) + + def hybrid_forward(self, F, x): + x_slice = x.slice(begin=self.slice[0][0], end=self.slice[0][1]) + y = self.conv0(x_slice) + "shape of y is (4, 32, 66, 18)" + y_slice = y.slice(begin=self.slice[1][0], end=self.slice[1][1]) + out = self.conv1(y_slice) + return out + x = mx.nd.random.uniform(shape=(8, 32, 64, 64)) + slice = [[(0, 0, 0, 0), (4, 16, 32, 32)], [(0, 0, 0, 0), (2, 16, 16, 16)]] + net = Net(slice) + check_layer_forward_withinput(net, x) + +@with_seed() +@unittest.skip('skippping temporarily, tracked by https://github.com/apache/incubator-mxnet/issues/11164') +def test_reshape_deconv_slice_deconv(): + class Net(gluon.HybridBlock): + def __init__(self, shape, slice, **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + self.reshape = shape + self.slice = slice + self.conv0 = nn.Conv2DTranspose(32, (3, 3)) + self.conv1 = nn.Conv2DTranspose(64, (3, 3), strides=(2, 2)) + + def hybrid_forward(self, F, x): + x_reshape = x.reshape(self.reshape) + y = self.conv0(x_reshape) + "shape of y is (4, 32, 66, 18)" + y_slice = y.slice(begin=self.slice[0], end=self.slice[1]) + out = self.conv1(y_slice) + return out + x = mx.nd.random.uniform(shape=(4, 16, 32, 32)) + shape = (4, 16, 64, -1) + slice = [(0, 0, 0, 0), (2, 16, 16, 16)] + net = Net(shape, slice) + check_layer_forward_withinput(net, x) + +@with_seed() +@unittest.skip('skippping temporarily, tracked by https://github.com/apache/incubator-mxnet/issues/11164') +def test_slice_deconv_reshape_deconv(): + class Net(gluon.HybridBlock): + def __init__(self, shape, slice, **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + self.reshape = shape + self.slice = slice + self.conv0 = nn.Conv2DTranspose(32, (3, 3)) + self.conv1 = nn.Conv2DTranspose(96, (3, 3), strides=(2, 2)) + + def hybrid_forward(self, F, x): + x_slice = x.slice(begin=self.slice[0], end=self.slice[1]) + y = self.conv0(x_slice) + "shape of y is (4, 32, 34, 34)" + y_reshape = y.reshape(self.reshape) + out = self.conv1(y_reshape) + return out + x = mx.nd.random.uniform(shape=(8, 32, 64, 64)) + shape = (4, 64, 34, -1) + slice = [(4, 0, 0, 0), (8, 16, 32, 32)] + net = Net(shape, slice) + check_layer_forward_withinput(net, x) + +@with_seed() +def test_reshape_activation(): + class Net(gluon.HybridBlock): + def __init__(self, act, shape, **kwargs): + super(Net, self).__init__(**kwargs) + with self.name_scope(): + self.reshape = shape + self.act = nn.Activation(act) + + def hybrid_forward(self, F, x): + x_reshape = x.reshape(self.reshape) + out = self.act(x_reshape) + return out + acts = ["relu", "sigmoid", "tanh", "softrelu"] + for act in acts: + x = mx.nd.random.uniform(-1, 1, shape=(4, 16, 32, 32)) + shape = (4, 32, 32, -1) + net = Net(act, shape) + check_layer_forward_withinput(net, x) + + +@with_seed() +def test_slice_activation(): + class 
+        def __init__(self, act, slice, **kwargs):
+            super(Net, self).__init__(**kwargs)
+            with self.name_scope():
+                self.slice = slice
+                self.act = nn.Activation(act)
+
+        def hybrid_forward(self, F, x):
+            x_slice = x.slice(begin=self.slice[0], end=self.slice[1])
+            out = self.act(x_slice)
+            return out
+
+    acts = ["relu", "sigmoid", "tanh", "softrelu"]
+    for act in acts:
+        x = mx.nd.random.uniform(-1, 1, shape=(8, 32, 64, 64))
+        slice = [(0, 16, 32, 32), (4, 32, 64, 64)]
+        net = Net(act, slice)
+        check_layer_forward_withinput(net, x)
+
+
+@with_seed()
+def test_reshape_activation_reshape_activation():
+    class Net(gluon.HybridBlock):
+        def __init__(self, act0, act1, shape, **kwargs):
+            super(Net, self).__init__(**kwargs)
+            with self.name_scope():
+                self.reshape = shape
+                self.act0 = nn.Activation(act0)
+                self.act1 = nn.Activation(act1)
+
+        def hybrid_forward(self, F, x):
+            x_reshape = x.reshape(self.reshape[0])
+            y = self.act0(x_reshape)
+            y_reshape = y.reshape(self.reshape[1])
+            out = self.act1(y_reshape)
+            return out
+
+    acts = ["relu", "sigmoid", "tanh", "softrelu"]
+    for idx0, act0 in enumerate(acts):
+        for idx1, act1 in enumerate(acts):
+            if idx1 == idx0:
+                continue
+            x = mx.nd.random.uniform(-1, 1, shape=(4, 16, 32, 32))
+            shape = [(4, 32, 32, -1), (4, 32, 16, -1)]
+            net = Net(act0, act1, shape)
+            check_layer_forward_withinput(net, x)
+
+
+@with_seed()
+def test_slice_activation_slice_activation():
+    class Net(gluon.HybridBlock):
+        def __init__(self, act0, act1, slice, **kwargs):
+            super(Net, self).__init__(**kwargs)
+            with self.name_scope():
+                self.slice = slice
+                self.act0 = nn.Activation(act0)
+                self.act1 = nn.Activation(act1)
+
+        def hybrid_forward(self, F, x):
+            x_slice = x.slice(begin=self.slice[0][0], end=self.slice[0][1])
+            y = self.act0(x_slice)
+            y_slice = y.slice(begin=self.slice[1][0], end=self.slice[1][1])
+            out = self.act1(y_slice)
+            return out
+
+    acts = ["relu", "sigmoid", "tanh", "softrelu"]
+    for idx0, act0 in enumerate(acts):
+        for idx1, act1 in enumerate(acts):
+            if idx1 == idx0:
+                continue
+            x = mx.nd.random.uniform(-1, 1, shape=(8, 32, 64, 64))
+            slice = [[(0, 16, 32, 32), (4, 32, 64, 64)], [(2, 0, 16, 16), (4, 16, 32, 32)]]
+            net = Net(act0, act1, slice)
+            check_layer_forward_withinput(net, x)
+
+
+@with_seed()
+def test_reshape_activation_slice_activation():
+    class Net(gluon.HybridBlock):
+        def __init__(self, act0, act1, shape, slice, **kwargs):
+            super(Net, self).__init__(**kwargs)
+            with self.name_scope():
+                self.reshape = shape
+                self.slice = slice
+                self.act0 = nn.Activation(act0)
+                self.act1 = nn.Activation(act1)
+
+        def hybrid_forward(self, F, x):
+            x_reshape = x.reshape(self.reshape)
+            y = self.act0(x_reshape)
+            y_slice = y.slice(begin=self.slice[0], end=self.slice[1])
+            out = self.act1(y_slice)
+            return out
+
+    acts = ["relu", "sigmoid", "tanh", "softrelu"]
+    for idx0, act0 in enumerate(acts):
+        for idx1, act1 in enumerate(acts):
+            if idx1 == idx0:
+                continue
+            x = mx.nd.random.uniform(-1, 1, shape=(4, 16, 32, 32))
+            shape = (4, 32, 32, -1)
+            slice = [(0, 0, 0, 0), (2, 16, 16, 16)]
+            net = Net(act0, act1, shape, slice)
+            check_layer_forward_withinput(net, x)
+
+
+@with_seed()
+def test_slice_activation_reshape_activation():
+    class Net(gluon.HybridBlock):
+        def __init__(self, act0, act1, shape, slice, **kwargs):
+            super(Net, self).__init__(**kwargs)
+            with self.name_scope():
+                self.reshape = shape
+                self.slice = slice
+                self.act0 = nn.Activation(act0)
+                self.act1 = nn.Activation(act1)
+
+        def hybrid_forward(self, F, x):
+            x_slice = x.slice(begin=self.slice[0], end=self.slice[1])
+            y = self.act0(x_slice)
+            y_reshape = y.reshape(self.reshape)
+            out = self.act1(y_reshape)
+            return out
+
+    acts = ["relu", "sigmoid", "tanh", "softrelu"]
+    for idx0, act0 in enumerate(acts):
+        for idx1, act1 in enumerate(acts):
+            if idx1 == idx0:
+                continue
+            x = mx.nd.random.uniform(-1, 1, shape=(8, 32, 64, 64))
+            slice = [(0, 16, 32, 32), (4, 32, 64, 64)]
+            shape = (4, 32, 32, -1)
+            net = Net(act0, act1, shape, slice)
+            check_layer_forward_withinput(net, x)
+
 if __name__ == '__main__':
     import nose
     nose.runmodule()