Skip to content

Commit

Permalink
lint resolved
Browse files Browse the repository at this point in the history
  • Loading branch information
ANSHUMAN TRIPATHY authored and ANSHUMAN TRIPATHY committed Feb 17, 2021
1 parent 1e03426 commit 83b5d2e
Show file tree
Hide file tree
Showing 5 changed files with 12 additions and 7 deletions.
2 changes: 2 additions & 0 deletions python/tvm/relay/op/strategy/generic.py
Original file line number Diff line number Diff line change
Expand Up @@ -821,12 +821,14 @@ def sparse_add_strategy(attrs, inputs, out_type, target):
)
return strategy


@generic_func
def schedule_sparse_add(attrs, outs, target):
    """Generic schedule for the sparse_add operator.

    Enters the given target's context and delegates to the generic TOPI
    schedule (``topi.generic.schedule_sparse_add``) for the output tensors.
    """
    with target:
        sched = topi.generic.schedule_sparse_add(outs)
    return sched


# sparse_transpose
@generic_func
def schedule_sparse_transpose(attrs, outs, target):
Expand Down
2 changes: 2 additions & 0 deletions python/tvm/topi/generic/nn.py
Original file line number Diff line number Diff line change
Expand Up @@ -729,6 +729,7 @@ def schedule_sparse_transpose(outs):
"""
return _default_schedule(outs, False)


def schedule_sparse_add(outs):
"""Schedule for sparse_add
Expand All @@ -745,6 +746,7 @@ def schedule_sparse_add(outs):
"""
return _default_schedule(outs, False)


def schedule_batch_matmul(outs):
"""Schedule for batch_matmul
Expand Down
10 changes: 5 additions & 5 deletions python/tvm/topi/nn/sparse.py
Original file line number Diff line number Diff line change
Expand Up @@ -357,6 +357,7 @@ def sparse_dense_alter_layout(_attrs, _inputs, _tinfos, _out_type):
"""
return None


def sparse_add(dense_data, sparse_data, sparse_indices, sparse_indptr):
"""
Computes sparse-dense addition
Expand All @@ -380,10 +381,11 @@ def sparse_add(dense_data, sparse_data, sparse_indices, sparse_indptr):
output : tvm.te.Tensor
2-D with shape [M, N]
"""
#TODO(ANSHUMAN87): support BSR format too
# TODO(ANSHUMAN87): support BSR format too
assert len(sparse_data.shape) == 1
return _sparse_add_csr(dense_data, sparse_data, sparse_indices, sparse_indptr)


def _sparse_add_csr(dense_data_inp, sparse_data_inp, sparse_indices_inp, sparse_indptr_inp):
oshape = get_const_tuple(dense_data_inp.shape)

Expand Down Expand Up @@ -413,10 +415,8 @@ def _csr_add_ir(dense_data, sparse_data, sparse_indices, sparse_indptr, out_data
return te.extern(
shape=oshape,
inputs=[dense_data_inp, sparse_data_inp, sparse_indices_inp, sparse_indptr_inp],
fcompute=lambda ins, outs: _csr_add_ir(
ins[0], ins[1], ins[2], ins[3], outs[0]
),
fcompute=lambda ins, outs: _csr_add_ir(ins[0], ins[1], ins[2], ins[3], outs[0]),
tag="sparse_add_csr",
dtype=["float32", "float32", "int32", "int32"],
name="out",
)
)
3 changes: 1 addition & 2 deletions src/relay/op/nn/sparse.cc
Original file line number Diff line number Diff line change
Expand Up @@ -198,7 +198,7 @@ RELAY_REGISTER_OP("nn.sparse_transpose")

// relay.nn.sparse_add
bool SparseAddRel(const Array<Type>& types, int num_inputs, const Attrs& attrs,
const TypeReporter& reporter) {
const TypeReporter& reporter) {
ICHECK_EQ(types.size(), 5);
const auto* dense_data = types[0].as<TensorTypeNode>();
const auto* sparse_data = types[1].as<TensorTypeNode>();
Expand Down Expand Up @@ -236,6 +236,5 @@ RELAY_REGISTER_OP("nn.sparse_add")
.set_support_level(1)
.add_type_rel("SparseAdd", SparseAddRel);


} // namespace relay
} // namespace tvm
2 changes: 2 additions & 0 deletions tests/python/topi/python/test_topi_sparse.py
Original file line number Diff line number Diff line change
Expand Up @@ -525,6 +525,7 @@ def test_sparse_dense_padded_alter_op():
with tvm.transform.PassContext(opt_level=3, required_pass="AlterOpLayout"):
x = relay.build(tvm.IRModule.from_expr(f), target=tvm.target.Target("cuda"))


def test_sparse_add_csr():
M, K, density = 3, 49, 0.2
X_np = np.random.randn(M, K).astype("float32")
Expand All @@ -549,6 +550,7 @@ def test_sparse_add_csr():
)
tvm.testing.assert_allclose(Z_tvm.asnumpy(), Z_np, atol=1e-4, rtol=1e-4)


if __name__ == "__main__":
test_csrmv()
test_csrmm()
Expand Down

0 comments on commit 83b5d2e

Please sign in to comment.