Bump minimum versions #9796

Merged
merged 3 commits on Nov 18, 2024
Changes from all commits
2 changes: 1 addition & 1 deletion ci/requirements/doc.yml
@@ -23,7 +23,7 @@ dependencies:
- netcdf4>=1.5
- numba
- numpy>=2
- packaging>=21.3
- packaging>=23.2
- pandas>=1.4,!=2.1.0
- pooch
- pip
18 changes: 9 additions & 9 deletions ci/requirements/min-all-deps.yml
@@ -9,19 +9,19 @@ dependencies:
# doc/user-guide/installing.rst, doc/user-guide/plotting.rst and setup.py.
- python=3.10
- array-api-strict=1.0 # dependency for testing the array api compat
- boto3=1.28
- boto3=1.29
- bottleneck=1.3
- cartopy=0.22
- cftime=1.6
- coveralls
- dask-core=2023.9
- distributed=2023.9
- dask-core=2023.11
- distributed=2023.11
# Flox > 0.8 has a bug with numbagg versions
# It will require numbagg > 0.6
# so we should just skip that series eventually
# or keep flox pinned for longer than necessary
- flox=0.7
- h5netcdf=1.2
- h5netcdf=1.3
# h5py and hdf5 tend to cause conflicts
# for e.g. hdf5 1.12 conflicts with h5py=3.1
# prioritize bumping other packages instead
@@ -30,15 +30,15 @@ dependencies:
- hypothesis
- iris=3.7
- lxml=4.9 # Optional dep of pydap
- matplotlib-base=3.7
- matplotlib-base=3.8
- nc-time-axis=1.4
# netcdf follows a 1.major.minor[.patch] convention
# (see https://github.com/Unidata/netcdf4-python/issues/1090)
- netcdf4=1.6.0
- numba=0.57
- numbagg=0.2.1
- numbagg=0.6
- numpy=1.24
- packaging=23.1
- packaging=23.2
- pandas=2.1
- pint=0.22
- pip
@@ -50,8 +50,8 @@ dependencies:
- pytest-timeout
- rasterio=1.3
- scipy=1.11
- seaborn=0.12
- seaborn=0.13
- sparse=0.14
- toolz=0.12
- typing_extensions=4.7
- typing_extensions=4.8
- zarr=2.16
16 changes: 16 additions & 0 deletions doc/whats-new.rst
@@ -19,6 +19,22 @@ What's New
v.2024.10.1 (unreleased)
------------------------


Breaking Changes
~~~~~~~~~~~~~~~~
- The minimum versions of some dependencies were changed

===================== ========= =======
Package Old New
===================== ========= =======
boto3 1.28 1.29
dask-core 2023.9 2023.11
distributed 2023.9 2023.11
h5netcdf 1.2 1.3
numbagg 0.2.1 0.6
typing_extensions 4.7 4.8
===================== ========= =======

New Features
~~~~~~~~~~~~
- Added :py:meth:`DataTree.persist` method (:issue:`9675`, :pull:`9682`).
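
Note: for downstream environments, the quickest sanity check against the Breaking Changes table above is to compare installed versions with the new floors. The sketch below is illustrative only and not part of this PR; the `NEW_MINIMUMS` mapping is hand-written from the table, and using `dask` as the PyPI distribution name for dask-core is an assumption.

```python
# Illustrative check of installed versions against the new minimums.
# NEW_MINIMUMS mirrors the whats-new table above; it is not shipped by xarray.
from importlib.metadata import PackageNotFoundError, version

from packaging.version import Version

NEW_MINIMUMS = {
    "boto3": "1.29",
    "dask": "2023.11.0",  # published as dask-core on conda-forge
    "distributed": "2023.11.0",
    "h5netcdf": "1.3",
    "numbagg": "0.6",
    "typing_extensions": "4.8",
}

for name, minimum in NEW_MINIMUMS.items():
    try:
        installed = Version(version(name))
    except PackageNotFoundError:
        continue  # optional dependency not installed, nothing to check
    if installed < Version(minimum):
        print(f"{name} {installed} is below the new minimum {minimum}")
```
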
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -23,7 +23,7 @@ requires-python = ">=3.10"

dependencies = [
"numpy>=1.24",
"packaging>=23.1",
"packaging>=23.2",
"pandas>=2.1",
]

10 changes: 1 addition & 9 deletions xarray/core/duck_array_ops.py
@@ -30,13 +30,11 @@
transpose,
unravel_index,
)
from packaging.version import Version
from pandas.api.types import is_extension_array_dtype

from xarray.core import dask_array_compat, dask_array_ops, dtypes, nputils
from xarray.core.options import OPTIONS
from xarray.core.utils import is_duck_array, is_duck_dask_array, module_available
from xarray.namedarray import pycompat
from xarray.namedarray.parallelcompat import get_chunked_array_type
from xarray.namedarray.pycompat import array_type, is_chunked_array

@@ -770,13 +768,7 @@ def _push(array, n: int | None = None, axis: int = -1):
if OPTIONS["use_numbagg"] and module_available("numbagg"):
import numbagg

if pycompat.mod_version("numbagg") < Version("0.6.2"):
warnings.warn(
f"numbagg >= 0.6.2 is required for bfill & ffill; {pycompat.mod_version('numbagg')} is installed. We'll attempt with bottleneck instead.",
stacklevel=2,
)
else:
return numbagg.ffill(array, limit=n, axis=axis)
return numbagg.ffill(array, limit=n, axis=axis)

# work around for bottleneck 178
limit = n if n is not None else array.shape[axis]
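
Note: with the numbagg floor now past 0.6.2, `_push` calls `numbagg.ffill` directly instead of warning and falling back to bottleneck. A minimal sketch of how this surfaces through the public API (the array and options here are illustrative):

```python
import numpy as np
import xarray as xr

da = xr.DataArray([0.0, np.nan, np.nan, 3.0], dims="x")

# With numbagg installed and use_numbagg enabled (the default), ffill/bfill
# dispatch straight to numbagg.ffill, with no runtime version check.
with xr.set_options(use_numbagg=True):
    filled = da.ffill(dim="x", limit=1)

print(filled.values)  # [ 0.  0. nan  3.]
```
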
1 change: 0 additions & 1 deletion xarray/core/nputils.py
@@ -181,7 +181,6 @@ def f(values, axis=None, **kwargs):

if (
module_available("numbagg")
and pycompat.mod_version("numbagg") >= Version("0.5.0")
and OPTIONS["use_numbagg"]
and isinstance(values, np.ndarray)
# numbagg<0.7.0 uses ddof=1 only, but numpy uses ddof=0 by default
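
Note: only the redundant `numbagg >= 0.5.0` comparison is removed here; the other guards in the condition (ndarray input, dtype, and the ddof restriction mentioned in the comment) still apply. A small illustrative example of the affected reductions:

```python
import xarray as xr

da = xr.DataArray([1.0, 2.0, 3.0, 4.0], dims="x")

# Per the comment in the surrounding condition, numbagg (before 0.7) only
# handles ddof=1 for std/var; ddof=0 takes the numpy/bottleneck path instead.
print(da.std(ddof=1).item())  # 1.2909944487358056
print(da.std(ddof=0).item())  # 1.118033988749895
```
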
3 changes: 0 additions & 3 deletions xarray/core/rolling.py
@@ -8,7 +8,6 @@
from typing import TYPE_CHECKING, Any, Generic, TypeVar

import numpy as np
from packaging.version import Version

from xarray.core import dask_array_ops, dtypes, duck_array_ops, utils
from xarray.core.arithmetic import CoarsenArithmetic
@@ -19,7 +18,6 @@
is_duck_dask_array,
module_available,
)
from xarray.namedarray import pycompat
from xarray.util.deprecation_helpers import _deprecate_positional_args

try:
@@ -713,7 +711,6 @@ def _array_reduce(
if (
OPTIONS["use_numbagg"]
and module_available("numbagg")
and pycompat.mod_version("numbagg") >= Version("0.6.3")
and numbagg_move_func is not None
# TODO: we could at least allow this for the equivalent of `apply_ufunc`'s
# "parallelized". `rolling_exp` does this, as an example (but rolling_exp is
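
Note: `_array_reduce` now only checks that numbagg is installed and enabled before taking the numbagg branch; the explicit `>= 0.6.3` comparison went away along with the `pycompat` import. A brief illustrative rolling reduction that can use this path:

```python
import xarray as xr

da = xr.DataArray([1.0, 2.0, 3.0, 4.0, 5.0, 6.0], dims="x")

# Rolling reductions such as mean/sum may dispatch to numbagg's moving-window
# kernels when numbagg is installed and use_numbagg is enabled.
with xr.set_options(use_numbagg=True):
    result = da.rolling(x=3).mean()

print(result.values)  # [nan nan  2.  3.  4.  5.]
```
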
26 changes: 0 additions & 26 deletions xarray/core/rolling_exp.py
@@ -4,14 +4,12 @@
from typing import Any, Generic

import numpy as np
from packaging.version import Version

from xarray.core.computation import apply_ufunc
from xarray.core.options import _get_keep_attrs
from xarray.core.pdcompat import count_not_none
from xarray.core.types import T_DataWithCoords
from xarray.core.utils import module_available
from xarray.namedarray import pycompat


def _get_alpha(
@@ -81,14 +79,6 @@ def __init__(
raise ImportError(
"numbagg >= 0.2.1 is required for rolling_exp but currently numbagg is not installed"
)
elif pycompat.mod_version("numbagg") < Version("0.2.1"):
raise ImportError(
f"numbagg >= 0.2.1 is required for rolling_exp but currently version {pycompat.mod_version('numbagg')} is installed"
)
elif pycompat.mod_version("numbagg") < Version("0.3.1") and min_weight > 0:
raise ImportError(
f"numbagg >= 0.3.1 is required for `min_weight > 0` within `.rolling_exp` but currently version {pycompat.mod_version('numbagg')} is installed"
)

self.obj: T_DataWithCoords = obj
dim, window = next(iter(windows.items()))
@@ -192,10 +182,6 @@ def std(self) -> T_DataWithCoords:
Dimensions without coordinates: x
"""

if pycompat.mod_version("numbagg") < Version("0.4.0"):
raise ImportError(
f"numbagg >= 0.4.0 is required for rolling_exp().std(), currently {pycompat.mod_version('numbagg')} is installed"
)
import numbagg

dim_order = self.obj.dims
@@ -225,10 +211,6 @@ def var(self) -> T_DataWithCoords:
array([ nan, 0. , 0.46153846, 0.18461538, 0.06446281])
Dimensions without coordinates: x
"""
if pycompat.mod_version("numbagg") < Version("0.4.0"):
raise ImportError(
f"numbagg >= 0.4.0 is required for rolling_exp().var(), currently {pycompat.mod_version('numbagg')} is installed"
)
dim_order = self.obj.dims
import numbagg

@@ -258,10 +240,6 @@ def cov(self, other: T_DataWithCoords) -> T_DataWithCoords:
Dimensions without coordinates: x
"""

if pycompat.mod_version("numbagg") < Version("0.4.0"):
raise ImportError(
f"numbagg >= 0.4.0 is required for rolling_exp().cov(), currently {pycompat.mod_version('numbagg')} is installed"
)
dim_order = self.obj.dims
import numbagg

@@ -292,10 +270,6 @@ def corr(self, other: T_DataWithCoords) -> T_DataWithCoords:
Dimensions without coordinates: x
"""

if pycompat.mod_version("numbagg") < Version("0.4.0"):
raise ImportError(
f"numbagg >= 0.4.0 is required for rolling_exp().corr(), currently {pycompat.mod_version('numbagg')} is installed"
)
dim_order = self.obj.dims
import numbagg

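
Note: the new floor (numbagg 0.6) already satisfies the old 0.2.1, 0.3.1, and 0.4.0 requirements, so `RollingExp` keeps only the import-time check that numbagg is present. An illustrative usage sketch (output values omitted):

```python
import xarray as xr

da = xr.DataArray([1.0, 1.0, 2.0, 2.0, 3.0], dims="x")

# rolling_exp still requires numbagg, but mean/std/var/cov/corr no longer
# perform their own version checks at call time.
rolled = da.rolling_exp(x=3, window_type="span")
print(rolled.mean().values)
print(rolled.std().values)
```
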
7 changes: 0 additions & 7 deletions xarray/namedarray/daskmanager.py
@@ -4,7 +4,6 @@
from typing import TYPE_CHECKING, Any

import numpy as np
from packaging.version import Version

from xarray.core.indexing import ImplicitToExplicitIndexingAdapter
from xarray.namedarray.parallelcompat import ChunkManagerEntrypoint, T_ChunkedArray
@@ -182,14 +181,8 @@ def map_blocks(
new_axis: int | Sequence[int] | None = None,
**kwargs: Any,
) -> Any:
import dask
from dask.array import map_blocks

if drop_axis is None and Version(dask.__version__) < Version("2022.9.1"):
# See https://github.com/pydata/xarray/pull/7019#discussion_r1196729489
# TODO remove once dask minimum version >= 2022.9.1
drop_axis = []

# pass through name, meta, token as kwargs
return map_blocks(
func,
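
Note: dropping the `drop_axis` normalization relies on dask >= 2022.9.1 accepting `drop_axis=None` directly, which the new 2023.11 minimum guarantees. A minimal sketch of the underlying dask call (the toy array is illustrative):

```python
import dask.array as da

x = da.ones((4, 4), chunks=2)

# On dask >= 2022.9.1, map_blocks accepts drop_axis=None (its default), so
# None no longer needs to be normalized to [] before forwarding kwargs.
y = da.map_blocks(lambda block: block * 2, x, dtype=x.dtype)

print(y.sum().compute())  # 32.0
```
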
21 changes: 5 additions & 16 deletions xarray/tests/__init__.py
@@ -122,7 +122,7 @@ def _importorskip(
has_zarr_v3, requires_zarr_v3 = _importorskip("zarr", "2.99")
has_fsspec, requires_fsspec = _importorskip("fsspec")
has_iris, requires_iris = _importorskip("iris")
has_numbagg, requires_numbagg = _importorskip("numbagg", "0.4.0")
has_numbagg, requires_numbagg = _importorskip("numbagg")
has_pyarrow, requires_pyarrow = _importorskip("pyarrow")
with warnings.catch_warnings():
warnings.filterwarnings(
@@ -157,34 +157,23 @@ def _importorskip(
has_array_api_strict, requires_array_api_strict = _importorskip("array_api_strict")


def _importorskip_h5netcdf_ros3():
try:
import h5netcdf

has_h5netcdf = True
except ImportError:
has_h5netcdf = False

def _importorskip_h5netcdf_ros3(has_h5netcdf: bool):
if not has_h5netcdf:
return has_h5netcdf, pytest.mark.skipif(
not has_h5netcdf, reason="requires h5netcdf"
)

h5netcdf_with_ros3 = Version(h5netcdf.__version__) >= Version("1.3.0")

import h5py

h5py_with_ros3 = h5py.get_config().ros3

has_h5netcdf_ros3 = h5netcdf_with_ros3 and h5py_with_ros3

return has_h5netcdf_ros3, pytest.mark.skipif(
not has_h5netcdf_ros3,
return h5py_with_ros3, pytest.mark.skipif(
not h5py_with_ros3,
reason="requires h5netcdf>=1.3.0 and h5py with ros3 support",
)


has_h5netcdf_ros3, requires_h5netcdf_ros3 = _importorskip_h5netcdf_ros3()
has_h5netcdf_ros3, requires_h5netcdf_ros3 = _importorskip_h5netcdf_ros3(has_h5netcdf)
has_netCDF4_1_6_2_or_above, requires_netCDF4_1_6_2_or_above = _importorskip(
"netCDF4", "1.6.2"
)
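
Note: after the refactor, `has_h5netcdf` is passed into `_importorskip_h5netcdf_ros3` rather than recomputed inside it, and `requires_numbagg` no longer encodes a minimum version. A hedged sketch of how a test module typically consumes these markers (the test name and body are invented for illustration):

```python
import numpy as np
import xarray as xr

# requires_numbagg is the pytest.mark.skipif marker defined in xarray.tests.
from xarray.tests import requires_numbagg


@requires_numbagg
def test_ffill_runs_with_numbagg():
    da = xr.DataArray([1.0, np.nan, 2.0], dims="x")
    assert not da.ffill("x").isnull().any()
```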