# Forked from pytorch/pytorch: test/onnx/test_pytorch_onnx_shape_inference.py
# (91 lines, 76 loc) -- tests for ONNX graph shape/type inference.
import unittest
import torch
import numpy as np
def expect_tensor(scalar_type, shape=None):
    """Build an assertion callback for an inferred JIT tensor type.

    Args:
        scalar_type: expected scalar type name (e.g. ``"Float"``).
        shape: optional expected sizes; ``None`` entries mark dynamic
            dimensions. When omitted, only the scalar type is checked.

    Returns:
        A function taking the inferred type object (must expose
        ``scalarType()`` and ``varyingSizes()``) and asserting it matches.
    """
    def verify(actual_type):
        np.testing.assert_equal(actual_type.scalarType(), scalar_type)
        if shape is not None:
            np.testing.assert_equal(actual_type.varyingSizes(), shape)
    return verify
class TestONNXShapeInference(unittest.TestCase):
    """Tests for the ONNX shape/type inference JIT pass on hand-built graphs."""

    # NOTE(review): class-body import keeps the private symbol scoped to this
    # test class; _onnx_main_opset is a torch-internal name.
    from torch.onnx.symbolic_helper import _onnx_main_opset
    opset_version = _onnx_main_opset

    def run_test(self, g, n, type_assertion_funcs):
        """Run shape/type inference on graph *g*, then check node *n*'s outputs.

        Args:
            g: the torch._C.Graph to run inference on.
            n: the node whose output types are verified.
            type_assertion_funcs: one callback, or a list of callbacks (one
                per output of *n*); each is called with the inferred type.
        """
        if not isinstance(type_assertion_funcs, list):
            type_assertion_funcs = [type_assertion_funcs]
        torch._C._jit_pass_onnx_graph_shape_type_inference(g, {}, self.opset_version)
        outputs = list(n.outputs())
        # Fail loudly instead of letting zip() silently drop unmatched entries.
        self.assertEqual(len(outputs), len(type_assertion_funcs))
        for out, type_assertion_func in zip(outputs, type_assertion_funcs):
            type_assertion_func(out.type())

    def create_empty_graph(self):
        """Return a fresh graph, running the pass once to initialize ConstantMap."""
        g = torch._C.Graph()
        # kick off initialization for ConstantMap.
        torch._C._jit_pass_onnx_graph_shape_type_inference(g, {}, self.opset_version)
        return g

    def insert_tensor_constant(self, g, tensor):
        """Insert an onnx::Constant node holding *tensor* into graph *g*."""
        return g.op("Constant", value_t=tensor)

    def test_cast(self):
        # Cast with input of unknown scalar type: result type comes from to_i
        # (1 == Float in the ONNX TensorProto enum).
        g = self.create_empty_graph()
        input = g.addInput()
        cast_out = g.op("Cast", input, to_i=1)
        self.run_test(g, cast_out.node(), expect_tensor("Float"))

    def test_constant_of_shape(self):
        # ConstantOfShape fed by onnx::Shape of a constant: shape fully static.
        g = self.create_empty_graph()
        constant = self.insert_tensor_constant(g, torch.ones(1, 2, 3, 4))
        shape = g.op("Shape", constant)
        constant_of_shape = g.op("ConstantOfShape", shape, value_t=torch.tensor([2.0]))
        self.run_test(g, constant_of_shape.node(), expect_tensor("Float", shape=(1, 2, 3, 4)))

    def test_constant_of_shape_static(self):
        # ConstantOfShape with input of prim::ListConstruct of static tensors:
        # every dim is a known constant, so the output shape is static too.
        rank = 4
        g = self.create_empty_graph()
        constants = [self.insert_tensor_constant(g, torch.tensor(i + 1)) for i in range(rank)]
        shape = g.op("prim::ListConstruct", *constants)
        shape.setType(torch._C.ListType.ofInts())
        constant_of_shape = g.op("ConstantOfShape", shape, value_t=torch.tensor([2.0]))
        self.run_test(g, constant_of_shape.node(), expect_tensor("Float", shape=(1, 2, 3, 4)))

    def test_constant_of_shape_dynamic(self):
        # ConstantOfShape with input of prim::ListConstruct of graph inputs:
        # dims are unknown at inference time, so every size stays dynamic (None).
        rank = 4
        g = self.create_empty_graph()
        inputs = [g.addInput() for i in range(rank)]
        shape = g.op("prim::ListConstruct", *inputs)
        shape.setType(torch._C.ListType.ofInts())
        constant_of_shape = g.op("ConstantOfShape", shape, value_t=torch.tensor([2.0]))
        self.run_test(g, constant_of_shape.node(), expect_tensor("Float", shape=(None, None, None, None)))

    def _check_reshape(self, input_sizes, target_shape, expected_shape):
        # Build Reshape(ones(*input_sizes), target_shape) and verify the
        # inferred output shape.
        g = self.create_empty_graph()
        constant = self.insert_tensor_constant(g, torch.ones(*input_sizes))
        constant_2 = self.insert_tensor_constant(g, torch.tensor(target_shape))
        shape = g.op("Reshape", constant, constant_2)
        self.run_test(g, shape.node(), expect_tensor("Float", shape=expected_shape))

    def test_reshape(self):
        # ONNX Reshape semantics: 0 copies the corresponding input dim,
        # -1 infers the remaining dim from the element count.
        self._check_reshape((2, 16, 5, 5), [2, 0, -1], (2, 16, 25))
        self._check_reshape((2, 16, 5, 4), [-1, 0, 4], (10, 16, 4))
        self._check_reshape((2, 16, 5, 4), [-1, 0, 0], (8, 16, 5))
# Allow running this test module directly from the command line.
if __name__ == "__main__":
    unittest.main()