Commit: formatting fixes
Jeffrey-Sima committed Apr 14, 2021
1 parent b09aff4 commit 17ab1c2
Showing 2 changed files with 16 additions and 9 deletions.
10 changes: 5 additions & 5 deletions python/tvm/relay/frontend/pytorch.py
@@ -970,8 +970,9 @@ def convolution(self, inputs, input_types):
         kernel_size = weight_shape[2:]
         use_bias = isinstance(bias, _expr.Expr)
 
-        # We are trying to invoke various relay operations through a single conv_op variable. However the function
-        # signatures for some operations have additional attributes so we pass these in along with the standard ones.
+        # We are trying to invoke various relay operations through a single conv_op variable.
+        # However the function signatures for some operations have additional attributes so we
+        # pass these in along with the standard ones.
         additional_arguments = dict()
 
         if use_transpose:
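For context, the dispatch pattern the rewrapped comment describes can be sketched in isolation. This is a minimal, hypothetical reconstruction, not the frontend's actual code: the shapes, the use_transpose flag, and the attribute values are made up, while relay.var, relay.nn.conv2d, relay.nn.conv2d_transpose, and the output_padding attribute are real relay API.

    from tvm import relay

    # Hypothetical stand-ins for what the frontend extracts from the PyTorch op.
    data = relay.var("data", shape=(1, 3, 32, 32), dtype="float32")
    weight = relay.var("weight", shape=(8, 3, 3, 3), dtype="float32")
    use_transpose = False

    # One conv_op variable selects the relay operator; attributes that only
    # some operators accept are collected here and splatted into the call.
    additional_arguments = dict()
    if use_transpose:
        conv_op = relay.nn.conv2d_transpose
        # Transposed convolutions take output_padding; plain ones do not.
        additional_arguments["output_padding"] = (0, 0)
    else:
        conv_op = relay.nn.conv2d

    conv_out = conv_op(
        data,
        weight,
        strides=(1, 1),
        padding=(1, 1),
        channels=8,
        kernel_size=(3, 3),
        **additional_arguments,
    )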
@@ -1016,7 +1017,6 @@ def convolution(self, inputs, input_types):
             data_layout = "NCHW"
             kernel_layout = "OIHW"
 
-
         conv_out = conv_op(
             data,
             weight,
@@ -1037,8 +1037,8 @@ def convolution(self, inputs, input_types):
         else:
             res = conv_out
 
         if is_grouped_conv1d:
-            # Because we conducted grouped conv1d convolution through conv2d we must squeeze the output to get the
-            # correct result.
+            # Because we conducted grouped conv1d convolution through conv2d we must
+            # squeeze the output to get the correct result.
             res = _op.squeeze(res, axis=[2])
         return res

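The trick the squeeze comment above refers to can also be shown standalone: grouped conv1d is routed through conv2d by inserting a dummy height axis, and that axis must be squeezed away afterwards. A hedged sketch with illustrative shapes, not the frontend's actual code (relay.expand_dims, relay.nn.conv2d, and relay.squeeze are real relay API):

    from tvm import relay

    # NCW input (1, 16, 100): a depthwise-style grouped 1-D convolution.
    data = relay.var("data", shape=(1, 16, 100), dtype="float32")
    weight = relay.var("weight", shape=(16, 1, 3), dtype="float32")  # groups=16

    # Insert a dummy height axis so conv2d can be used: NCW -> NC1W, OIW -> OI1W.
    data_4d = relay.expand_dims(data, axis=2)      # (1, 16, 1, 100)
    weight_4d = relay.expand_dims(weight, axis=2)  # (16, 1, 1, 3)

    conv_out = relay.nn.conv2d(
        data_4d,
        weight_4d,
        groups=16,
        channels=16,
        kernel_size=(1, 3),
        padding=(0, 1),
    )

    # Squeeze the dummy height axis (axis 2) to recover the conv1d shape.
    res = relay.squeeze(conv_out, axis=[2])        # (1, 16, 100)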
15 changes: 11 additions & 4 deletions tests/python/frontend/pytorch/test_forward.py
@@ -952,10 +952,17 @@ def forward(self, *args):
 @pytest.mark.parametrize("output_padding", [0, 1, 2], ids=lambda x: 'output_padding=' + str(x))
 @pytest.mark.parametrize("groups", [1], ids=lambda x: 'groups=' + str(x))
 @pytest.mark.parametrize("bias", [True, False], ids=lambda x: 'bias=' + str(x))
-def test_forward_conv_transpose(in_channels, out_channels, kernel_size, output_padding, bias, groups):
-    # Note we do not test with groups > 1 because that is not supported in tvm for conv transpose operations
-
-    # output padding must be smaller than either stride or dilation so we opt to make the stride 1 + output padding
+def test_forward_conv_transpose(in_channels,
+                                out_channels,
+                                kernel_size,
+                                output_padding,
+                                bias,
+                                groups):
+    # Note we do not test with groups > 1 because that is not supported
+    # in tvm for conv transpose operations
+
+    # Output padding must be smaller than either stride or dilation so we
+    # opt to make the stride 1 + output padding
     stride = output_padding + 1
 
     #Conv 3D Transpose Tests
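The constraint in that comment comes from PyTorch itself: ConvTranspose layers require output_padding to be smaller than either stride or dilation, so deriving stride as output_padding + 1 keeps every parametrized case legal. A quick standalone check with hypothetical layer sizes (torch.nn.ConvTranspose2d is the real PyTorch API):

    import torch

    for output_padding in [0, 1, 2]:
        stride = output_padding + 1  # guarantees output_padding < stride
        layer = torch.nn.ConvTranspose2d(
            in_channels=4, out_channels=4, kernel_size=3,
            stride=stride, output_padding=output_padding,
        )
        out = layer(torch.randn(1, 4, 8, 8))
        print(f"output_padding={output_padding} stride={stride} -> {tuple(out.shape)}")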
