From ad478568fa878f02cb835359387f3704272dc3b4 Mon Sep 17 00:00:00 2001 From: Leyuan Wang Date: Sun, 31 Mar 2019 07:11:11 +0000 Subject: [PATCH 1/7] gather_nd added --- docs/langref/relay_op.rst | 2 + python/tvm/relay/frontend/mxnet.py | 8 +++- python/tvm/relay/op/_transform.py | 1 + python/tvm/relay/op/transform.py | 33 +++++++++++++ src/relay/op/tensor/transform.cc | 71 ++++++++++++++++++++++++++++ tests/python/relay/test_op_level3.py | 19 +++++++- 6 files changed, 132 insertions(+), 2 deletions(-) diff --git a/docs/langref/relay_op.rst b/docs/langref/relay_op.rst index bbb27ec83b48..b2db587c4fc9 100644 --- a/docs/langref/relay_op.rst +++ b/docs/langref/relay_op.rst @@ -92,6 +92,7 @@ This level enables additional math and transform operators. tvm.relay.zeros_like tvm.relay.ones tvm.relay.ones_like + tvm.relay.gather_nd tvm.relay.full tvm.relay.full_like tvm.relay.cast @@ -225,6 +226,7 @@ Level 3 Definitions .. autofunction:: tvm.relay.zeros_like .. autofunction:: tvm.relay.ones .. autofunction:: tvm.relay.ones_like +.. autofunction:: tvm.relay.gather_nd .. autofunction:: tvm.relay.full .. autofunction:: tvm.relay.full_like .. autofunction:: tvm.relay.cast diff --git a/python/tvm/relay/frontend/mxnet.py b/python/tvm/relay/frontend/mxnet.py index 8e36801f98ba..3081da0e9831 100644 --- a/python/tvm/relay/frontend/mxnet.py +++ b/python/tvm/relay/frontend/mxnet.py @@ -631,6 +631,12 @@ def _mx_deformable_convolution(inputs, attrs): return res +def _mx_gather_nd(inputs, attrs): + new_attrs = {} + assert len(inputs) == 2 + return _op.gather_nd(inputs[0], inputs[1]) + + # Note: due to attribute conversion constraint # ops in the identity set must be attribute free _identity_list = [ @@ -768,6 +774,7 @@ def _mx_deformable_convolution(inputs, attrs): "SoftmaxOutput" : _mx_softmax_output, "SoftmaxActivation" : _mx_softmax_activation, "smooth_l1" : _mx_smooth_l1, + "gather_nd" : _mx_gather_nd, # vision "_contrib_BilinearResize2D" : _mx_upsampling, "_contrib_MultiBoxPrior" : _mx_multibox_prior, @@ -782,7 +789,6 @@ def _mx_deformable_convolution(inputs, attrs): # TODO(tvm-tvm): support all operators. # # "broadcast_to", - # "gather_nd", # "Crop" : _crop_like, } diff --git a/python/tvm/relay/op/_transform.py b/python/tvm/relay/op/_transform.py index 72fbca967555..7575d10f7753 100644 --- a/python/tvm/relay/op/_transform.py +++ b/python/tvm/relay/op/_transform.py @@ -32,6 +32,7 @@ _reg.register_schedule("stack", schedule_injective) _reg.register_schedule("concatenate", schedule_injective) _reg.register_schedule("_contrib_reverse_reshape", schedule_injective) +_reg.register_schedule("gather_nd", schedule_injective) # layout_transform _reg.register_schedule("layout_transform", schedule_injective) diff --git a/python/tvm/relay/op/transform.py b/python/tvm/relay/op/transform.py index 73573043946c..02065a865dc0 100644 --- a/python/tvm/relay/op/transform.py +++ b/python/tvm/relay/op/transform.py @@ -651,3 +651,36 @@ def reverse_reshape(data, newshape): if isinstance(newshape, int): newshape = [newshape] return _make._contrib_reverse_reshape(data, list(newshape)) + + +def gather_nd(data, indices): + """Gather elements or slices from data and store to a tensor whose shape is + defined by indices. + + Parameters + ---------- + data : relay.Expr + The input data to the operator. + + indices : tuple of int + The shape of output tensor. + + Returns + ------- + ret : relay.Expr + The computed result. + + Examples + -------- + .. 
code-block:: python + + data = [[0, 1], [2, 3]] + indices = [[1, 1, 0], [0, 1, 0]] + relay.gather_nd(data, indices) = [2, 3, 0] + + data = [[[1, 2], [3, 4]], [[5, 6], [7, 8]]] + indices = [[0, 1], [1, 0]] + relay.gather_nd(data, indices) = [[3, 4], [5, 6]] + """ + + return _make.gather_nd(data, indices) diff --git a/src/relay/op/tensor/transform.cc b/src/relay/op/tensor/transform.cc index 08b06a2a084f..0038362aeb15 100644 --- a/src/relay/op/tensor/transform.cc +++ b/src/relay/op/tensor/transform.cc @@ -2122,5 +2122,76 @@ example below:: .set_attr("FTVMCompute", ReshapeCompute) .set_attr("TOpPattern", kInjective); +// gather_nd operator +bool GatherNDRel(const Array& types, + int num_inputs, + const Attrs& attrs, + const TypeReporter& reporter) { + // `types` contains: [data, indices, result] + CHECK_EQ(types.size(), 3); + const auto* data = types[0].as(); + const auto* indices = types[1].as(); + if (data == nullptr) { + CHECK(types[0].as()) + << "GatherND: expect input type to be TensorType but get " + << types[0]; + return false; + } + if (indices == nullptr) { + CHECK(types[1].as()) + << "GatherND: expect indices type to be TensorType but get " + << types[1]; + return false; + } + const size_t ndim = data->shape.size(); + const size_t mdim = indices->shape[0]; + const size_t kdim = indices->shape.size() - 1; + CHECK(mdim <= ndim) + << "GatherND: indices shape does satisfy."; + + Array oshape; + for (size_t i = 1; i < kdim + 1; ++i) + oshape.push_back(indices->shape[i]); + if (mdim < ndim) + for (size_t i = mdim; i < ndim; ++i) + oshape.push_back(data->shape[i]); + reporter->Assign(types[1], TensorTypeNode::make(oshape, data->dtype)); + return true; +} + +Array GatherNDCompute(const Attrs& attrs, + const Array& inputs, + const Type& out_type, + const Target& target) { + return { topi::gather_nd(inputs[0], inputs[1]) }; +} + +Expr MakeGatherND(Expr data, + Expr indices) { + static const Op& op = Op::Get("GatherND"); + return CallNode::make(op, {data}, {}); +} + +TVM_REGISTER_API("relay.op._make.gather_nd") +.set_body([](const TVMArgs& args, TVMRetValue* rv) { + runtime::detail::unpack_call(MakeGatherND, args, rv); +}); + +RELAY_REGISTER_OP("gather_nd") +.describe(R"code(Gather elements or slices from data and store to + a tensor whose shape is defined by indices. + +Given data with shape (X_0, X_1, ..., X_{N-1}) and indices with +shape (M, Y_0, ..., Y_{K-1}), the output will have shape +(Y_0, ..., Y_{K-1}, X_M, ..., X_{N-1}), where M <= N. If M == N, +output shape will simply be (Y_0, ..., Y_{K-1}). 
+)code" TVM_ADD_FILELINE) +.set_num_inputs(2) +.add_argument("data", "Tensor", "The input tensor.") +.set_support_level(3) +.add_type_rel("GatherND", GatherNDRel) +.set_attr("FTVMCompute", GatherNDCompute) +.set_attr("TOpPattern", kInjective); + } // namespace relay } // namespace tvm diff --git a/tests/python/relay/test_op_level3.py b/tests/python/relay/test_op_level3.py index 0cfbcc2c0378..7919e538eb5a 100644 --- a/tests/python/relay/test_op_level3.py +++ b/tests/python/relay/test_op_level3.py @@ -553,7 +553,6 @@ def verify_stack(dshapes, axis): verify_stack([(2, 2, 3, 4), (2, 2, 3, 4), (2, 2, 3, 4), (2, 2, 3, 4)], -1) - def test_reverse(): def verify_reverse(dshape, axis): x = relay.var("x", relay.TensorType(dshape, "float32")) @@ -573,6 +572,23 @@ def verify_reverse(dshape, axis): verify_reverse((2, 3, 4), -1) +def test_gather_nd(): + def verify_gather_nd(dshape, indices): + x = relay.var("x", relay.TensorType(dshape, "float32")) + z = relay.gather_nd(x, indices=indices) + + func = relay.Function([x], z) + x_data = np.random.uniform(size=dshape).astype("float32") + ref_res = x_data[indices] + + for target, ctx in ctx_list(): + for kind in ["graph", "debug"]: + intrp = relay.create_executor(kind, ctx=ctx, target=target) + op_res = intrp.evaluate(func)(x_data) + tvm.testing.assert_allclose(op_res.asnumpy(), ref_res, rtol=1e-5) + verify_gather_nd((2, 2), [[1, 1, 0], [0, 1, 0]]) + + if __name__ == "__main__": test_cast() test_zeros_ones() @@ -601,3 +617,4 @@ def verify_reverse(dshape, axis): test_stack() test_tile() test_repeat() + test_gather_nd() From ec3086bc6c3bb43f018eadad098a5ad05fb98fee Mon Sep 17 00:00:00 2001 From: Leyuan Wang Date: Mon, 1 Apr 2019 22:35:10 +0000 Subject: [PATCH 2/7] gather_nd test added --- python/tvm/relay/op/transform.py | 2 +- src/relay/op/tensor/transform.cc | 17 ++++++++--------- tests/python/relay/test_op_level3.py | 18 ++++++++++-------- 3 files changed, 19 insertions(+), 18 deletions(-) diff --git a/python/tvm/relay/op/transform.py b/python/tvm/relay/op/transform.py index 02065a865dc0..406147c395bf 100644 --- a/python/tvm/relay/op/transform.py +++ b/python/tvm/relay/op/transform.py @@ -662,7 +662,7 @@ def gather_nd(data, indices): data : relay.Expr The input data to the operator. - indices : tuple of int + indices : relay.Expr The shape of output tensor. 
Returns diff --git a/src/relay/op/tensor/transform.cc b/src/relay/op/tensor/transform.cc index 0038362aeb15..06c788c64b1b 100644 --- a/src/relay/op/tensor/transform.cc +++ b/src/relay/op/tensor/transform.cc @@ -2133,7 +2133,7 @@ bool GatherNDRel(const Array& types, const auto* indices = types[1].as(); if (data == nullptr) { CHECK(types[0].as()) - << "GatherND: expect input type to be TensorType but get " + << "GatherND: expect input data type to be TensorType but get " << types[0]; return false; } @@ -2144,18 +2144,17 @@ bool GatherNDRel(const Array& types, return false; } const size_t ndim = data->shape.size(); - const size_t mdim = indices->shape[0]; + const IntImm* mdim = data->shape[0].as(); const size_t kdim = indices->shape.size() - 1; - CHECK(mdim <= ndim) + CHECK(mdim->value <= ndim) << "GatherND: indices shape does satisfy."; Array oshape; for (size_t i = 1; i < kdim + 1; ++i) oshape.push_back(indices->shape[i]); - if (mdim < ndim) - for (size_t i = mdim; i < ndim; ++i) - oshape.push_back(data->shape[i]); - reporter->Assign(types[1], TensorTypeNode::make(oshape, data->dtype)); + for (size_t i = mdim->value; i < ndim; ++i) + oshape.push_back(data->shape[i]); + reporter->Assign(types[2], TensorTypeNode::make(oshape, data->dtype)); return true; } @@ -2168,8 +2167,8 @@ Array GatherNDCompute(const Attrs& attrs, Expr MakeGatherND(Expr data, Expr indices) { - static const Op& op = Op::Get("GatherND"); - return CallNode::make(op, {data}, {}); + static const Op& op = Op::Get("gather_nd"); + return CallNode::make(op, {data, indices}, {}); } TVM_REGISTER_API("relay.op._make.gather_nd") diff --git a/tests/python/relay/test_op_level3.py b/tests/python/relay/test_op_level3.py index 7919e538eb5a..4424b624312d 100644 --- a/tests/python/relay/test_op_level3.py +++ b/tests/python/relay/test_op_level3.py @@ -573,20 +573,22 @@ def verify_reverse(dshape, axis): def test_gather_nd(): - def verify_gather_nd(dshape, indices): - x = relay.var("x", relay.TensorType(dshape, "float32")) - z = relay.gather_nd(x, indices=indices) + def verify_gather_nd(xshape, yshape, y_data): + x = relay.var("x", relay.TensorType(xshape, "float32")) + y = relay.var("y", relay.TensorType(yshape, "int32")) + z = relay.gather_nd(x, y) - func = relay.Function([x], z) - x_data = np.random.uniform(size=dshape).astype("float32") - ref_res = x_data[indices] + func = relay.Function([x, y], z) + x_data = np.random.uniform(size=xshape).astype("float32") + ref_res = x_data[y_data] for target, ctx in ctx_list(): for kind in ["graph", "debug"]: intrp = relay.create_executor(kind, ctx=ctx, target=target) - op_res = intrp.evaluate(func)(x_data) + op_res = intrp.evaluate(func)(x_data, y_data) tvm.testing.assert_allclose(op_res.asnumpy(), ref_res, rtol=1e-5) - verify_gather_nd((2, 2), [[1, 1, 0], [0, 1, 0]]) + verify_gather_nd((2, 2), (2, 3), [[1, 1, 0], [0, 1, 0]]) + verify_gather_nd((2, 2, 2), (2, 2), [[0, 1], [1, 0]]) if __name__ == "__main__": From 1e5db89f725b8de6b9b53196c37054d21b89d590 Mon Sep 17 00:00:00 2001 From: Leyuan Wang Date: Mon, 1 Apr 2019 23:06:24 +0000 Subject: [PATCH 3/7] more test added --- tests/python/frontend/mxnet/test_forward.py | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/tests/python/frontend/mxnet/test_forward.py b/tests/python/frontend/mxnet/test_forward.py index 9d0d59402ecd..797f8c025dcd 100644 --- a/tests/python/frontend/mxnet/test_forward.py +++ b/tests/python/frontend/mxnet/test_forward.py @@ -491,6 +491,21 @@ def verify(shape, indices_src, axis, mode="clip"): verify((3,4), [-1, 5], 1) 
verify((3,4), [-1, 5], 1, mode="wrap") +def test_forward_gather_nd(): + def verify(xshape, yshape, y_data): + x_data = np.random.uniform(size=xshape).astype("float32") + ref_res = mx.nd.gather_nd(mx.nd.array(x_data), mx.nd.array(y_data)) + mx_sym = mx.sym.gather_nd(mx.sym.var("x_data"), mx.sym.var("y_data")) + new_sym, _ = relay.frontend.from_mxnet(mx_sym, {"x_data": xshape, "y_data": yshape}, {"x_data": "float32", "y_data": "int32"}) + for target, ctx in ctx_list(): + for kind in ["graph", "debug"]: + intrp = relay.create_executor(kind, ctx=ctx, target=target) + op_res = intrp.evaluate(new_sym)(x_data, y_data) + tvm.testing.assert_allclose(op_res.asnumpy(), ref_res.asnumpy()) + verify((2, 2), (2, 3), [[1, 1, 0], [0, 1, 0]]) + verify((2, 2, 2), (2, 2), [[0, 1], [1, 0]]) + + if __name__ == '__main__': test_forward_mlp() test_forward_vgg() @@ -527,3 +542,4 @@ def verify(shape, indices_src, axis, mode="clip"): test_forward_embedding() test_forward_smooth_l1() test_forward_take() + test_forward_gather_nd() From 9381c2c3a66ef4f8942ae3d6d85d042f834db1f0 Mon Sep 17 00:00:00 2001 From: Leyuan Wang Date: Mon, 1 Apr 2019 23:28:24 +0000 Subject: [PATCH 4/7] fix lint --- python/tvm/relay/frontend/mxnet.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/python/tvm/relay/frontend/mxnet.py b/python/tvm/relay/frontend/mxnet.py index 3081da0e9831..3a7bf996f286 100644 --- a/python/tvm/relay/frontend/mxnet.py +++ b/python/tvm/relay/frontend/mxnet.py @@ -631,8 +631,7 @@ def _mx_deformable_convolution(inputs, attrs): return res -def _mx_gather_nd(inputs, attrs): - new_attrs = {} +def _mx_gather_nd(inputs): assert len(inputs) == 2 return _op.gather_nd(inputs[0], inputs[1]) From e08bcc2cd41c83670a50be909b9ddfdf705b0de1 Mon Sep 17 00:00:00 2001 From: Leyuan Wang Date: Mon, 1 Apr 2019 23:34:40 +0000 Subject: [PATCH 5/7] fix build error --- src/relay/op/tensor/transform.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/relay/op/tensor/transform.cc b/src/relay/op/tensor/transform.cc index 06c788c64b1b..c9bc33c68827 100644 --- a/src/relay/op/tensor/transform.cc +++ b/src/relay/op/tensor/transform.cc @@ -2146,7 +2146,7 @@ bool GatherNDRel(const Array& types, const size_t ndim = data->shape.size(); const IntImm* mdim = data->shape[0].as(); const size_t kdim = indices->shape.size() - 1; - CHECK(mdim->value <= ndim) + CHECK(size_t(mdim->value) <= ndim) << "GatherND: indices shape does satisfy."; Array oshape; From ea363aa3ba0fdfe9a6cf7fcd89d9ac3381e6e66e Mon Sep 17 00:00:00 2001 From: Leyuan Wang Date: Tue, 2 Apr 2019 01:06:03 +0000 Subject: [PATCH 6/7] fix lint --- python/tvm/relay/frontend/mxnet.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/python/tvm/relay/frontend/mxnet.py b/python/tvm/relay/frontend/mxnet.py index 3a7bf996f286..b80b8ab01897 100644 --- a/python/tvm/relay/frontend/mxnet.py +++ b/python/tvm/relay/frontend/mxnet.py @@ -1,4 +1,4 @@ -# pylint: disable=invalid-name, import-self, len-as-condition +# pylint: disable=invalid-name, import-self, len-as-condition, unused-argument """MXNet symbol frontend.""" from __future__ import absolute_import as _abs @@ -631,7 +631,7 @@ def _mx_deformable_convolution(inputs, attrs): return res -def _mx_gather_nd(inputs): +def _mx_gather_nd(inputs, attrs): assert len(inputs) == 2 return _op.gather_nd(inputs[0], inputs[1]) From 29924a4cf7c07916cefeb7415d1088217e006126 Mon Sep 17 00:00:00 2001 From: Leyuan Wang Date: Tue, 2 Apr 2019 05:07:17 +0000 Subject: [PATCH 7/7] comments addressed --- 
python/tvm/relay/frontend/mxnet.py | 9 ++------- 1 file changed, 2 insertions(+), 7 deletions(-) diff --git a/python/tvm/relay/frontend/mxnet.py b/python/tvm/relay/frontend/mxnet.py index b80b8ab01897..357bc2da62af 100644 --- a/python/tvm/relay/frontend/mxnet.py +++ b/python/tvm/relay/frontend/mxnet.py @@ -1,4 +1,4 @@ -# pylint: disable=invalid-name, import-self, len-as-condition, unused-argument +# pylint: disable=invalid-name, import-self, len-as-condition """MXNet symbol frontend.""" from __future__ import absolute_import as _abs @@ -631,11 +631,6 @@ def _mx_deformable_convolution(inputs, attrs): return res -def _mx_gather_nd(inputs, attrs): - assert len(inputs) == 2 - return _op.gather_nd(inputs[0], inputs[1]) - - # Note: due to attribute conversion constraint # ops in the identity set must be attribute free _identity_list = [ @@ -651,6 +646,7 @@ def _mx_gather_nd(inputs, attrs): "zeros_like", "ones_like", "where", + "gather_nd", ] _convert_map = { @@ -773,7 +769,6 @@ def _mx_gather_nd(inputs, attrs): "SoftmaxOutput" : _mx_softmax_output, "SoftmaxActivation" : _mx_softmax_activation, "smooth_l1" : _mx_smooth_l1, - "gather_nd" : _mx_gather_nd, # vision "_contrib_BilinearResize2D" : _mx_upsampling, "_contrib_MultiBoxPrior" : _mx_multibox_prior,
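
For reference only, and not part of the patch series above: a minimal NumPy sketch of the gather_nd semantics documented in transform.py, assuming exactly the behaviour shown in the docstring examples; gather_nd_ref is a hypothetical helper used purely for illustration.

.. code-block:: python

    import numpy as np

    def gather_nd_ref(data, indices):
        # indices has shape (M, Y_0, ..., Y_{K-1}); splitting it along axis 0
        # and indexing data with the resulting tuple reproduces the documented
        # output shape (Y_0, ..., Y_{K-1}, X_M, ..., X_{N-1}), with M <= N.
        return data[tuple(indices)]

    data = np.array([[0, 1], [2, 3]])
    indices = np.array([[1, 1, 0], [0, 1, 0]])
    print(gather_nd_ref(data, indices))    # [2 3 0]

    data = np.array([[[1, 2], [3, 4]], [[5, 6], [7, 8]]])
    indices = np.array([[0, 1], [1, 0]])
    print(gather_nd_ref(data, indices))    # [[3 4]
                                           #  [5 6]]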