Skip to content

Commit

Permalink
Add dask_expr to pipeline
Browse files Browse the repository at this point in the history
  • Loading branch information
hoxbro committed May 23, 2024
1 parent 1c3b012 commit e659a5a
Show file tree
Hide file tree
Showing 2 changed files with 11 additions and 5 deletions.
12 changes: 11 additions & 1 deletion datashader/data_libraries/dask.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,7 @@
from __future__ import annotations

from contextlib import suppress

import numpy as np
import pandas as pd
import dask
Expand All @@ -16,7 +18,6 @@
__all__ = ()


@bypixel.pipeline.register(dd.DataFrame)
def dask_pipeline(df, schema, canvas, glyph, summary, *, antialias=False, cuda=False):
dsk, name = glyph_dispatch(glyph, df, schema, canvas, summary, antialias=antialias, cuda=cuda)

Expand All @@ -35,6 +36,15 @@ def dask_pipeline(df, schema, canvas, glyph, summary, *, antialias=False, cuda=False):
return scheduler(dsk, name)


# Register the shared dask_pipeline implementation for both DataFrame
# flavors: the classic collection always, the dask-expr (query-planning)
# collection only when that optional package is importable.
bypixel.pipeline.register(dd.core.DataFrame)(dask_pipeline)

try:
    import dask_expr
except ImportError:
    # dask-expr not installed; only the classic collection is supported.
    pass
else:
    bypixel.pipeline.register(dask_expr._collection.DataFrame)(dask_pipeline)


def shape_bounds_st_and_axis(df, canvas, glyph):
if not canvas.x_range or not canvas.y_range:
x_extents, y_extents = glyph.compute_bounds_dask(df)
Expand Down
4 changes: 0 additions & 4 deletions datashader/tests/test_dask.py
Original file line number Diff line number Diff line change
Expand Up @@ -60,17 +60,13 @@ def _pandas():
def _dask():
    """Return a 2-partition dask DataFrame fixture built from ``_pandas()``,
    with dask's query planning disabled."""
    # Turn off query planning so the classic dd.core.DataFrame path is used.
    config.set(**{'dataframe.query-planning': False})
    import dask.dataframe as dd
    import datashader.data_libraries.dask as ds_dask
    # Reload so the query-planning setting takes effect — presumably the
    # config is read at import time; TODO confirm against dask docs.
    dd = reload(dd)
    # NOTE(review): the commit shown in this diff deletes the ds_dask
    # import/reload lines; they are kept here only because this rendering
    # does not mark which lines were removed.
    ds_dask = reload(ds_dask)
    return dd.from_pandas(_pandas(), npartitions=2)

def _dask_expr():
    """Return a 2-partition dask DataFrame fixture built from ``_pandas()``,
    with dask's query planning enabled (dask-expr backend)."""
    # Turn on query planning so the dask_expr._collection.DataFrame path is used.
    config.set(**{'dataframe.query-planning': True})
    import dask.dataframe as dd
    import datashader.data_libraries.dask as ds_dask
    # Reload so the query-planning setting takes effect — presumably the
    # config is read at import time; TODO confirm against dask docs.
    dd = reload(dd)
    # NOTE(review): the commit shown in this diff deletes the ds_dask
    # import/reload lines; they are kept here only because this rendering
    # does not mark which lines were removed.
    ds_dask = reload(ds_dask)
    return dd.from_pandas(_pandas(), npartitions=2)

def _dask_cudf():
Expand Down

0 comments on commit e659a5a

Please sign in to comment.