Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

CFTime support for polyval #6624

Merged
merged 19 commits into from
May 31, 2022
Merged
Show file tree
Hide file tree
Changes from 15 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -1,5 +1,7 @@
*.py[cod]
__pycache__
.env
.venv

# example caches from Hypothesis
.hypothesis/
Expand Down
31 changes: 23 additions & 8 deletions xarray/core/computation.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,7 @@
from .options import OPTIONS, _get_keep_attrs
from .pycompat import is_duck_dask_array
from .types import T_DataArray
from .utils import is_dict_like
from .utils import is_dict_like, is_scalar
from .variable import Variable

if TYPE_CHECKING:
Expand Down Expand Up @@ -1887,6 +1887,15 @@ def polyval(coord: Dataset, coeffs: Dataset, degree_dim: Hashable) -> Dataset:
...


@overload
def polyval(
coord: Dataset | DataArray,
coeffs: Dataset | DataArray,
degree_dim: Hashable = "degree",
) -> Dataset | DataArray:
...


def polyval(
coord: Dataset | DataArray,
coeffs: Dataset | DataArray,
Expand Down Expand Up @@ -1953,15 +1962,21 @@ def _ensure_numeric(data: Dataset | DataArray) -> Dataset | DataArray:
"""
from .dataset import Dataset

# Build the epoch (1970-01-01) as an instance of the same cftime datetime
# subclass (i.e. same calendar) as the values stored in ``x``.
def _cfoffset(x: DataArray) -> Any:
    # Materialize the data to inspect the first element's concrete type.
    # NOTE(review): this computes the *whole* array to read one element, and
    # assumes ``x`` is non-empty — confirm callers guarantee that.
    scalar = x.compute().data[0]
    if not is_scalar(scalar):
        # we do not get a scalar back on dask == 2021.04.1
        scalar = scalar.item()
    # type(scalar) is a cftime datetime class (e.g. cftime.DatetimeNoLeap);
    # constructing it with (1970, 1, 1) yields the epoch in that calendar.
    return type(scalar)(1970, 1, 1)

def to_floatable(x: DataArray) -> DataArray:
if x.dtype.kind == "M":
# datetimes
if x.dtype.kind in "MO":
# datetimes (CFIndexes are object type)
offset = (
np.datetime64("1970-01-01") if x.dtype.kind == "M" else _cfoffset(x)
Copy link
Collaborator

@keewis keewis May 29, 2022

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

can we get a test with a non-standard calendar / something very far from 1970 (like, 480-01-01)? I suspect that will cause the cftime support to fail because _cfoffset hard-codes the offset to 1970-01-01 (not sure, though, I might misunderstand the code).

Copy link
Collaborator Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This was added to copy the behavior of polyfit. But I agree that such a test would be useful. Additionally, we should add an integration test for these two functions.

Copy link
Contributor

@dcherian dcherian May 31, 2022

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

That offset choice is in v2022.03.0 too:

# Special case for non-standard calendar indexes
# Numerical datetime values are defined with respect to 1970-01-01T00:00:00 in units of nanoseconds
if isinstance(index, (CFTimeIndex, pd.DatetimeIndex)):
offset = type(index[0])(1970, 1, 1)
if isinstance(index, CFTimeIndex):
index = index.values

)
return x.copy(
data=datetime_to_numeric(
x.data,
offset=np.datetime64("1970-01-01"),
datetime_unit="ns",
),
data=datetime_to_numeric(x.data, offset=offset, datetime_unit="ns"),
)
elif x.dtype.kind == "m":
# timedeltas
Expand Down
22 changes: 19 additions & 3 deletions xarray/core/duck_array_ops.py
Original file line number Diff line number Diff line change
Expand Up @@ -431,7 +431,14 @@ def datetime_to_numeric(array, offset=None, datetime_unit=None, dtype=float):
# Compute timedelta object.
# For np.datetime64, this can silently yield garbage due to overflow.
# One option is to enforce 1970-01-01 as the universal offset.
array = array - offset

# This map_blocks call is for backwards compatibility.
# dask == 2021.04.1 does not support subtracting object arrays
# which is required for cftime
if is_duck_dask_array(array) and np.issubdtype(array.dtype, np.object):
array = array.map_blocks(lambda a, b: a - b, offset, meta=array._meta)
else:
array = array - offset

# Scalar is converted to 0d-array
if not hasattr(array, "dtype"):
Expand Down Expand Up @@ -517,10 +524,19 @@ def pd_timedelta_to_float(value, datetime_unit):
return np_timedelta64_to_float(value, datetime_unit)


def _timedelta_to_seconds(array):
return np.reshape([a.total_seconds() for a in array.ravel()], array.shape) * 1e6


def py_timedelta_to_float(array, datetime_unit):
    """Convert a timedelta object to a float, possibly at a loss of resolution."""
    array = asarray(array)
    if is_duck_dask_array(array):
        # dask: defer the per-element total_seconds() conversion to the chunks
        in_us = array.map_blocks(
            _timedelta_to_seconds, meta=np.array([], dtype=np.float64)
        )
    else:
        in_us = _timedelta_to_seconds(array)
    # _timedelta_to_seconds yields microseconds; rescale to the requested unit
    conversion_factor = np.timedelta64(1, "us") / np.timedelta64(1, datetime_unit)
    return conversion_factor * in_us

Expand Down
46 changes: 43 additions & 3 deletions xarray/tests/test_computation.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,13 @@
from xarray.core.pycompat import dask_version
from xarray.core.types import T_Xarray

from . import has_dask, raise_if_dask_computes, requires_dask
from . import (
has_cftime,
has_dask,
raise_if_dask_computes,
requires_cftime,
requires_dask,
)


def assert_identical(a, b):
Expand Down Expand Up @@ -1936,7 +1942,9 @@ def test_where_attrs() -> None:
assert actual.attrs == {}


@pytest.mark.parametrize("use_dask", [False, True])
@pytest.mark.parametrize(
"use_dask", [pytest.param(False, id="nodask"), pytest.param(True, id="dask")]
)
@pytest.mark.parametrize(
["x", "coeffs", "expected"],
[
Expand Down Expand Up @@ -2031,8 +2039,40 @@ def test_polyval(
pytest.skip("requires dask")
coeffs = coeffs.chunk({"degree": 2})
x = x.chunk({"x": 2})

with raise_if_dask_computes():
actual = xr.polyval(coord=x, coeffs=coeffs) # type: ignore
actual = xr.polyval(coord=x, coeffs=coeffs)

xr.testing.assert_allclose(actual, expected)


@requires_cftime
@pytest.mark.parametrize(
    "use_dask", [pytest.param(False, id="nodask"), pytest.param(True, id="dask")]
)
def test_polyval_cftime(use_dask: bool) -> None:
    # Identity polynomial p(t) = t over a 3-sample cftime coordinate:
    # seconds since the epoch become nanoseconds in the result.
    x = xr.DataArray(
        xr.date_range("1970-01-01", freq="1S", periods=3, use_cftime=True),
        dims="x",
    )
    coeffs = xr.DataArray([0, 1], dims="degree", coords={"degree": [0, 1]})

    if use_dask:
        if not has_dask:
            pytest.skip("requires dask")
        coeffs = coeffs.chunk({"degree": 2})
        x = x.chunk({"x": 2})

    # one compute is allowed: the cftime offset helper inspects the first value
    with raise_if_dask_computes(max_computes=1):
        actual = xr.polyval(coord=x, coeffs=coeffs)

    expected = xr.DataArray(
        [0, 1e9, 2e9],
        dims="x",
        coords={
            "x": xr.date_range("1970-01-01", freq="1S", periods=3, use_cftime=True)
        },
    )
    xr.testing.assert_allclose(actual, expected)


Expand Down
49 changes: 39 additions & 10 deletions xarray/tests/test_duck_array_ops.py
Original file line number Diff line number Diff line change
Expand Up @@ -675,39 +675,68 @@ def test_multiple_dims(dtype, dask, skipna, func):
assert_allclose(actual, expected)


@pytest.mark.parametrize("dask", [True, False])
def test_datetime_to_numeric_datetime64(dask):
    """datetime_to_numeric on np.datetime64 input, eagerly and through dask."""
    if dask and not has_dask:
        pytest.skip("requires dask")

    times = pd.date_range("2000", periods=5, freq="7D").values
    if dask:
        # Import under an alias: a bare ``import dask.array`` would rebind the
        # local name ``dask`` (the bool parameter) to the module.
        import dask.array as da

        times = da.from_array(times, chunks=-1)

    with raise_if_dask_computes():
        result = duck_array_ops.datetime_to_numeric(times, datetime_unit="h")
    expected = 24 * np.arange(0, 35, 7)
    np.testing.assert_array_equal(result, expected)

    offset = times[1]
    with raise_if_dask_computes():
        result = duck_array_ops.datetime_to_numeric(
            times, offset=offset, datetime_unit="h"
        )
    expected = 24 * np.arange(-7, 28, 7)
    np.testing.assert_array_equal(result, expected)

    dtype = np.float32
    with raise_if_dask_computes():
        result = duck_array_ops.datetime_to_numeric(
            times, datetime_unit="h", dtype=dtype
        )
    expected = 24 * np.arange(0, 35, 7).astype(dtype)
    np.testing.assert_array_equal(result, expected)


@requires_cftime
@pytest.mark.parametrize("dask", [True, False])
def test_datetime_to_numeric_cftime(dask):
    """datetime_to_numeric on cftime (object-dtype) input, eagerly and through dask."""
    if dask and not has_dask:
        pytest.skip("requires dask")

    times = cftime_range("2000", periods=5, freq="7D", calendar="standard").values
    if dask:
        # Import under an alias: a bare ``import dask.array`` would rebind the
        # local name ``dask`` (the bool parameter) to the module.
        import dask.array as da

        times = da.from_array(times, chunks=-1)
    with raise_if_dask_computes():
        result = duck_array_ops.datetime_to_numeric(times, datetime_unit="h", dtype=int)
    expected = 24 * np.arange(0, 35, 7)
    np.testing.assert_array_equal(result, expected)

    offset = times[1]
    with raise_if_dask_computes():
        result = duck_array_ops.datetime_to_numeric(
            times, offset=offset, datetime_unit="h", dtype=int
        )
    expected = 24 * np.arange(-7, 28, 7)
    np.testing.assert_array_equal(result, expected)

    dtype = np.float32
    with raise_if_dask_computes():
        result = duck_array_ops.datetime_to_numeric(
            times, datetime_unit="h", dtype=dtype
        )
    expected = 24 * np.arange(0, 35, 7).astype(dtype)
    np.testing.assert_array_equal(result, expected)

Expand Down