Skip to content

Commit

Permalink
Test finch in asv
Browse files Browse the repository at this point in the history
  • Loading branch information
mtsokol committed Apr 2, 2024
1 parent 9a8b31a commit ff411c7
Show file tree
Hide file tree
Showing 9 changed files with 206 additions and 5 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -121,7 +121,7 @@ jobs:
asv machine --yes
- name: Run benchmarks
run: |
asv run --quick
asv run --quick -v
on:
# Trigger the workflow on push or pull request,
# but only for the main branch
Expand Down
45 changes: 41 additions & 4 deletions asv.conf.json
Original file line number Diff line number Diff line change
Expand Up @@ -3,20 +3,57 @@
"HEAD"
],
"build_command": [],
"environment_type": "conda",
"environment_type": "virtualenv",
"exclude": [
{
"env": {
"SPARSE_BACKEND": "Finch"
},
"req": {
"finch-tensor": null
}
},
{
"env": {
"SPARSE_BACKEND": "PyData"
},
"req": {
"finch-tensor": ""
}
}
],
"install_command": [
"pip install --no-deps ."
],
"matrix": {
"env": {},
"env": {
"SPARSE_BACKEND": [
"Finch",
"PyData"
]
},
"env_nobuild": {},
"req": {
"numba": [],
"numpy": []
"finch-tensor": [
"",
null
],
"juliacall": [
""
],
"numba": [
""
],
"numpy": [
""
]
}
},
"project": "sparse",
"project_url": "https://sparse.pydata.org/",
"pythons": [
"3.10"
],
"repo": ".",
"uninstall_command": [
"pip uninstall sparse --yes"
Expand Down
112 changes: 112 additions & 0 deletions benchmarks/benchmark_backends.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,112 @@
import sparse

import numpy as np

from .utils import SkipNotImplemented

# Per-benchmark timeout in seconds; assigned to each suite's `timeout` class attribute.
TIMEOUT: float = 180.0
# Active sparse backend, resolved once at import time from the backend context var
# (set via the SPARSE_BACKEND env var in the asv matrix).
BACKEND: sparse.BackendType = sparse.backend_var.get()


class Tensordot:
    """Benchmark ``sparse.tensordot`` between a 2-D and a 3-D random sparse tensor."""

    timeout = TIMEOUT

    def setup(self):
        rng = np.random.default_rng(0)

        kwargs = {"density": 0.01, "random_state": rng}
        if BACKEND == sparse.BackendType.PyData:
            # PyData backend: use the compressed GCXS format.
            kwargs["format"] = "gcxs"

        self.s1 = sparse.random((100, 10), **kwargs)
        self.s2 = sparse.random((100, 100, 10), **kwargs)

        if BACKEND == sparse.BackendType.Finch:
            import finch

            # Move both operands into explicit Finch storage layouts.
            layout_2d = finch.Dense(finch.SparseList(finch.Element(0.0)))
            layout_3d = finch.Dense(finch.SparseList(finch.SparseList(finch.Element(0.0))))
            self.s1 = self.s1.to_device(finch.Storage(layout_2d, order=self.s1.get_order()))
            self.s2 = self.s2.to_device(finch.Storage(layout_3d, order=self.s2.get_order()))

        sparse.tensordot(self.s1, self.s2, axes=([0, 1], [0, 2]))  # compilation

    def time_tensordot(self):
        sparse.tensordot(self.s1, self.s2, axes=([0, 1], [0, 2]))


class SpMv:
    """Benchmark a sparse matrix/dense matrix product plus addition: ``M @ v1 + v2``."""

    timeout = TIMEOUT
    # NOTE(review): both size/density entries are identical, so asv benchmarks the
    # same configuration twice; the trailing comment suggests one was meant to be
    # (1_000_000, 1e-05) — TODO confirm intended values before changing them.
    params = [[True, False], [(1000, 0.01), (1000, 0.01)]]  # (1_000_000, 1e-05)
    param_names = ["lazy_mode", "size_and_density"]

    def setup(self, lazy_mode, size_and_density):
        rng = np.random.default_rng(0)
        size, density = size_and_density

        random_kwargs = {"density": density, "random_state": rng}
        if sparse.BackendType.PyData == BACKEND:
            # Use the compressed GCXS format for the PyData backend.
            # Bug fix: the original re-created random_kwargs right after setting
            # "format", silently discarding the gcxs request.
            random_kwargs["format"] = "gcxs"

        self.M = sparse.random((size, size), **random_kwargs)
        self.v1 = rng.normal(size=(size, 2))
        self.v2 = rng.normal(size=(size, 2))

        if sparse.BackendType.Finch == BACKEND:
            import finch

            # Convert operands to explicit Finch storage / tensors.
            self.M = self.M.to_device(
                finch.Storage(finch.Dense(finch.SparseList(finch.Element(0.0))), order=self.M.get_order())
            )
            self.v1 = finch.Tensor(self.v1)
            self.v2 = finch.Tensor(self.v2)
            if lazy_mode:

                @sparse.compiled
                def fn(tns1, tns2, tns3):
                    return tns1 @ tns2 + tns3
            else:

                def fn(tns1, tns2, tns3):
                    return tns1 @ tns2 + tns3

        elif sparse.BackendType.PyData == BACKEND:
            if lazy_mode:
                raise SkipNotImplemented("PyData doesn't have lazy mode")

            def fn(tns1, tns2, tns3):
                return tns1 @ tns2 + tns3

        else:
            raise Exception(f"Invalid backend: {BACKEND}")

        self.fn = fn
        self.fn(self.M, self.v1, self.v2)  # warm-up / JIT compilation

    def time_spmv(self, lazy_mode, size_and_density):
        self.fn(self.M, self.v1, self.v2)


# class SDDMM:
# timeout = TIMEOUT

# def setup():
# pass

# class Reductions:
# timeout = TIMEOUT

# def setup():
# pass

# class Elemwise:
# timeout = TIMEOUT

# def setup():
# pass
14 changes: 14 additions & 0 deletions benchmarks/benchmark_coo.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,9 +2,13 @@

import numpy as np

from .utils import skip_if_finch


class MatrixMultiplySuite:
def setup(self):
skip_if_finch()

rng = np.random.default_rng(0)
self.x = sparse.random((100, 100), density=0.01, random_state=rng)
self.y = sparse.random((100, 100), density=0.01, random_state=rng)
Expand All @@ -17,11 +21,14 @@ def time_matmul(self):

class ElemwiseSuite:
def setup(self):
skip_if_finch()

rng = np.random.default_rng(0)
self.x = sparse.random((100, 100, 100), density=0.01, random_state=rng)
self.y = sparse.random((100, 100, 100), density=0.01, random_state=rng)

self.x + self.y # Numba compilation
self.x * self.y # Numba compilation

def time_add(self):
self.x + self.y
Expand All @@ -32,10 +39,15 @@ def time_mul(self):

class ElemwiseBroadcastingSuite:
def setup(self):
skip_if_finch()

rng = np.random.default_rng(0)
self.x = sparse.random((100, 1, 100), density=0.01, random_state=rng)
self.y = sparse.random((100, 100), density=0.01, random_state=rng)

self.x + self.y # Numba compilation
self.x * self.y # Numba compilation

def time_add(self):
self.x + self.y

Expand All @@ -45,6 +57,8 @@ def time_mul(self):

class IndexingSuite:
def setup(self):
skip_if_finch()

rng = np.random.default_rng(0)
self.index = rng.integers(0, 100, 50)
self.x = sparse.random((100, 100, 100), density=0.01, random_state=rng)
Expand Down
12 changes: 12 additions & 0 deletions benchmarks/benchmark_gcxs.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,9 +2,13 @@

import numpy as np

from .utils import skip_if_finch


class MatrixMultiplySuite:
def setup(self):
skip_if_finch()

rng = np.random.default_rng(0)
self.x = sparse.random((100, 100), density=0.01, format="gcxs", random_state=rng)
self.y = sparse.random((100, 100), density=0.01, format="gcxs", random_state=rng)
Expand All @@ -17,6 +21,8 @@ def time_matmul(self):

class ElemwiseSuite:
def setup(self):
skip_if_finch()

rng = np.random.default_rng(0)
self.x = sparse.random((100, 100, 100), density=0.01, format="gcxs", random_state=rng)
self.y = sparse.random((100, 100, 100), density=0.01, format="gcxs", random_state=rng)
Expand All @@ -32,6 +38,8 @@ def time_mul(self):

class ElemwiseBroadcastingSuite:
def setup(self):
skip_if_finch()

rng = np.random.default_rng(0)
self.x = sparse.random((100, 1, 100), density=0.01, format="gcxs", random_state=rng)
self.y = sparse.random((100, 100), density=0.01, format="gcxs", random_state=rng)
Expand All @@ -45,6 +53,8 @@ def time_mul(self):

class IndexingSuite:
def setup(self):
skip_if_finch()

rng = np.random.default_rng(0)
self.index = rng.integers(0, 100, 50)
self.x = sparse.random((100, 100, 100), density=0.01, format="gcxs", random_state=rng)
Expand Down Expand Up @@ -74,6 +84,8 @@ class DenseMultiplySuite:
param_names = ["compressed axis", "n_vectors"]

def setup(self, compressed_axis, n_vecs):
skip_if_finch()

rng = np.random.default_rng(1337)
n = 10000
x = sparse.random((n, n), density=0.001, format="gcxs", random_state=rng).change_compressed_axes(
Expand Down
4 changes: 4 additions & 0 deletions benchmarks/benchmark_matmul.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,11 +2,15 @@

import numpy as np

from .utils import skip_if_finch


class Matmul_Sparse:
params = (["coo", "gcxs"], [0, 1, None])

def setup(self, p, dens_arg):
skip_if_finch()

rng = np.random.default_rng(0)
self.x = sparse.random((100, 100), density=0.01, format=p, random_state=rng)
self.y = sparse.random((100, 100), density=0.01, format=p, random_state=rng)
Expand Down
8 changes: 8 additions & 0 deletions benchmarks/benchmark_tensordot.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,8 @@

import numpy as np

from .utils import skip_if_finch


class TensordotSuiteDenseSparse:
"""
Expand All @@ -10,6 +12,8 @@ class TensordotSuiteDenseSparse:
"""

def setup(self):
skip_if_finch()

rng = np.random.default_rng(0)
self.n = rng.random((100, 100))
self.s = sparse.random((100, 100, 100, 100), density=0.01, random_state=rng)
Expand All @@ -28,6 +32,8 @@ class TensordotSuiteSparseSparse:
"""

def setup(self):
skip_if_finch()

rng = np.random.default_rng(0)
self.s1 = sparse.random((100, 100), density=0.01, random_state=rng)
self.s2 = sparse.random((100, 100, 100, 100), density=0.01, random_state=rng)
Expand All @@ -46,6 +52,8 @@ class TensordotSuiteSparseDense:
"""

def setup(self):
skip_if_finch()

rng = np.random.default_rng(0)
self.s = sparse.random((100, 100, 100, 100), density=0.01, random_state=rng)
self.n = rng.random((100, 100))
Expand Down
10 changes: 10 additions & 0 deletions benchmarks/utils.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
import os

import sparse

from asv_runner.benchmarks.mark import SkipNotImplemented


def skip_if_finch():
    """Skip the current asv benchmark when the Finch backend is selected.

    Raises:
        SkipNotImplemented: if the backend env var selects the Finch backend.
    """
    # Bug fix: use .get() so a missing env var means "not Finch" rather than a
    # KeyError that would fail every benchmark run outside the asv env matrix.
    if os.environ.get(sparse._ENV_VAR_NAME) == "Finch":
        raise SkipNotImplemented("Finch backend is skipped.")
4 changes: 4 additions & 0 deletions sparse/finch_backend/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@
int64,
int_,
lazy,
matmul,
multiply,
negative,
permute_dims,
Expand All @@ -30,6 +31,7 @@
random,
subtract,
sum,
tensordot,
uint,
uint8,
uint16,
Expand All @@ -56,6 +58,7 @@
"int64",
"int_",
"lazy",
"matmul",
"multiply",
"negative",
"permute_dims",
Expand All @@ -64,6 +67,7 @@
"random",
"subtract",
"sum",
"tensordot",
"uint",
"uint8",
"uint16",
Expand Down

0 comments on commit ff411c7

Please sign in to comment.