diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 86626098..40f05328 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -5,7 +5,7 @@ default_stages:
minimum_pre_commit_version: 2.9.3
repos:
- repo: https://github.com/astral-sh/ruff-pre-commit
- rev: v0.8.1
+ rev: v0.9.1
hooks:
- id: ruff
types_or: [python, pyi, jupyter]
@@ -17,7 +17,7 @@ repos:
hooks:
- id: blacken-docs
- repo: https://github.com/pre-commit/mirrors-mypy
- rev: v1.13.0
+ rev: v1.14.1
hooks:
- id: mypy
additional_dependencies: [numpy, pandas, types-requests]
diff --git a/docs/_ext/typed_returns.py b/docs/_ext/typed_returns.py
index 8d43ba5c..2976077d 100644
--- a/docs/_ext/typed_returns.py
+++ b/docs/_ext/typed_returns.py
@@ -12,7 +12,7 @@ def _process_return(lines: Iterable[str]) -> Iterator[str]:
m = re.fullmatch(r"(?P<param>\w+)\s+:\s+(?P<type>[\w.]+)", line)
if m:
# Once this is in scanpydoc, we can use the fancy hover stuff
- yield f'**{m["param"]}** : :class:`~{m["type"]}`'
+ yield f"**{m['param']}** : :class:`~{m['type']}`"
else:
yield line
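
The quote flip above comes with the ruff bump at the top of this diff: the formatter now normalizes quotes inside f-strings, keeping double quotes for the outer string and switching the subscripts to single quotes. A minimal sketch showing the two spellings are identical at runtime (a plain dict stands in for the `re.Match` object):

```python
# Stand-in for the regex match: subscripting a Match by group name works like this dict lookup.
m = {"param": "adata", "type": "anndata.AnnData"}

old_style = f'**{m["param"]}** : :class:`~{m["type"]}`'  # pre-bump spelling
new_style = f"**{m['param']}** : :class:`~{m['type']}`"  # formatter output after the bump
assert old_style == new_style
```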
diff --git a/pyproject.toml b/pyproject.toml
index 4a00c13d..182db3bf 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -51,7 +51,7 @@ dependencies = [
"anndata>=0.9",
"cycler>=0.11.0",
"dask-image>=0.5.0",
- "dask[array]>=2021.02.0",
+ "dask[array]>=2021.02.0,<=2024.11.2",
"docrep>=0.3.1",
"fsspec>=2021.11.0",
"leidenalg>=0.8.2",
diff --git a/src/squidpy/datasets/_dataset.py b/src/squidpy/datasets/_dataset.py
index 83c5e361..4a7a4133 100644
--- a/src/squidpy/datasets/_dataset.py
+++ b/src/squidpy/datasets/_dataset.py
@@ -74,8 +74,7 @@
)
_slideseqv2 = AMetadata(
name="slideseqv2",
- doc_header="Pre-processed SlideseqV2 dataset from `Stickles et al "
- "`__.",
+ doc_header="Pre-processed SlideseqV2 dataset from `Stickles et al `__.",
shape=(41786, 4000),
url="https://ndownloader.figshare.com/files/28242783",
)
diff --git a/src/squidpy/gr/_build.py b/src/squidpy/gr/_build.py
index 0f14694c..2f001905 100644
--- a/src/squidpy/gr/_build.py
+++ b/src/squidpy/gr/_build.py
@@ -6,7 +6,7 @@
from collections.abc import Iterable # noqa: F401
from functools import partial
from itertools import chain
-from typing import Any
+from typing import Any, cast
import geopandas as gpd
import numpy as np
@@ -145,16 +145,16 @@ def spatial_neighbors(
- :attr:`anndata.AnnData.uns` ``['{{key_added}}']`` - :class:`dict` containing parameters.
"""
if isinstance(adata, SpatialData):
- assert (
- elements_to_coordinate_systems is not None
- ), "Since `adata` is a :class:`spatialdata.SpatialData`, `elements_to_coordinate_systems` must not be `None`."
- assert (
- table_key is not None
- ), "Since `adata` is a :class:`spatialdata.SpatialData`, `table_key` must not be `None`."
+ assert elements_to_coordinate_systems is not None, (
+ "Since `adata` is a :class:`spatialdata.SpatialData`, `elements_to_coordinate_systems` must not be `None`."
+ )
+ assert table_key is not None, (
+ "Since `adata` is a :class:`spatialdata.SpatialData`, `table_key` must not be `None`."
+ )
elements, table = match_element_to_table(adata, list(elements_to_coordinate_systems), table_key)
- assert table.obs_names.equals(
- adata.tables[table_key].obs_names
- ), "The spatialdata table must annotate all elements keys. Some elements are missing, please check the `elements_to_coordinate_systems` dictionary."
+ assert table.obs_names.equals(adata.tables[table_key].obs_names), (
+ "The spatialdata table must annotate all elements keys. Some elements are missing, please check the `elements_to_coordinate_systems` dictionary."
+ )
regions, region_key, instance_key = get_table_keys(adata.tables[table_key])
regions = [regions] if isinstance(regions, str) else regions
ordered_regions_in_table = adata.tables[table_key].obs[region_key].unique()
@@ -237,7 +237,7 @@ def spatial_neighbors(
for lib in libs:
ixs.extend(np.where(adata.obs[library_key] == lib)[0])
mats.append(_build_fun(adata[adata.obs[library_key] == lib]))
- ixs = np.argsort(ixs).tolist() # invert
+ ixs = cast(list[int], np.argsort(ixs).tolist())
Adj = block_diag([m[0] for m in mats], format="csr")[ixs, :][:, ixs]
Dst = block_diag([m[1] for m in mats], format="csr")[ixs, :][:, ixs]
else:
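
Two things happen in this file: the assert messages are re-wrapped so the message rather than the condition is parenthesized (a pure formatting change), and the inverted index list is wrapped in `typing.cast` because `ndarray.tolist()` is typed too loosely for mypy to infer `list[int]` on its own. A small illustration of the cast with made-up indices:

```python
from typing import cast

import numpy as np

ixs = [2, 0, 1]
order = cast(list[int], np.argsort(ixs).tolist())  # cast is a no-op at runtime, it only informs mypy
print(order)  # [1, 2, 0]
```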
diff --git a/src/squidpy/gr/_ligrec.py b/src/squidpy/gr/_ligrec.py
index bedb7bad..d7766e5b 100644
--- a/src/squidpy/gr/_ligrec.py
+++ b/src/squidpy/gr/_ligrec.py
@@ -441,8 +441,7 @@ def test(
if corr_method is not None:
logg.info(
- f"Performing FDR correction across the `{corr_axis.v}` "
- f"using method `{corr_method}` at level `{alpha}`"
+ f"Performing FDR correction across the `{corr_axis.v}` using method `{corr_method}` at level `{alpha}`"
)
res["pvalues"] = _fdr_correct(res["pvalues"], corr_method, corr_axis, alpha=alpha)
diff --git a/src/squidpy/im/_container.py b/src/squidpy/im/_container.py
index a180d678..8da91d11 100644
--- a/src/squidpy/im/_container.py
+++ b/src/squidpy/im/_container.py
@@ -802,8 +802,7 @@ def generate_spot_crops(
obs_library_ids = list(adata.obs[library_id])
except KeyError:
logg.debug(
- f"Unable to find library ids in `adata.obs[{library_id!r}]`. "
- f"Trying in `adata.uns[{spatial_key!r}]`"
+ f"Unable to find library ids in `adata.obs[{library_id!r}]`. Trying in `adata.uns[{spatial_key!r}]`"
)
library_id = Key.uns.library_id(adata, spatial_key=spatial_key, library_id=library_id)
if not isinstance(library_id, str):
@@ -1622,7 +1621,7 @@ def _repr_html_(self) -> str:
)
s += "
"
if i == 9 and i < len(self) - 1: # show only first 10 layers
- s += f"and {len(self) - i - 1} more...
"
+ s += f"and {len(self) - i - 1} more...
"
break
return s
diff --git a/src/squidpy/pl/_ligrec.py b/src/squidpy/pl/_ligrec.py
index 18a55476..4ce5888c 100644
--- a/src/squidpy/pl/_ligrec.py
+++ b/src/squidpy/pl/_ligrec.py
@@ -41,8 +41,9 @@ def __init__(self, minn: float, delta: float, alpha: float | None, *args: Any, *
def _plot_size_legend(self, size_legend_ax: Axes) -> None:
y = self.BASE ** -((self.dot_max * self._delta) + self._minn)
x = self.BASE ** -((self.dot_min * self._delta) + self._minn)
- size_range = -(np.logspace(x, y, self.DEFAULT_NUM_LEGEND_DOTS + 1, base=10).astype(np.float64))
- size_range = (size_range - np.min(size_range)) / (np.max(size_range) - np.min(size_range))
+ size_range = -np.logspace(x, y, self.DEFAULT_NUM_LEGEND_DOTS + 1, base=10, dtype=np.float64)
+ size_range = ((size_range - np.min(size_range)) / (np.max(size_range) - np.min(size_range))).astype(np.float64)
+
# no point in showing dot of size 0
size_range = size_range[1:]
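
Here the `dtype` is passed to `np.logspace` directly instead of calling `astype` afterwards, and the rescaled result is cast back to `float64` explicitly; the values are the same either way. A quick equivalence check with arbitrary inputs:

```python
import numpy as np

a = -(np.logspace(0.1, 1.0, 9, base=10).astype(np.float64))
b = -np.logspace(0.1, 1.0, 9, base=10, dtype=np.float64)
assert b.dtype == np.float64 and np.array_equal(a, b)

# the min-max rescaling that follows maps the values into [0, 1] in both cases
norm = (b - b.min()) / (b.max() - b.min())
print(norm.min(), norm.max())  # 0.0 1.0
```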
diff --git a/src/squidpy/pl/_utils.py b/src/squidpy/pl/_utils.py
index 98ac1590..6b16f6f9 100644
--- a/src/squidpy/pl/_utils.py
+++ b/src/squidpy/pl/_utils.py
@@ -178,9 +178,9 @@ def _warn_if_exists_obs(adata: AnnData, obs_key: str) -> None:
@njit(cache=True, fastmath=True)
def _point_inside_triangles(triangles: NDArrayA) -> np.bool_:
# modified from napari
- AB = triangles[:, 1, :] - triangles[:, 0, :]
- AC = triangles[:, 2, :] - triangles[:, 0, :]
- BC = triangles[:, 2, :] - triangles[:, 1, :]
+ AB: NDArrayA = triangles[:, 1, :] - triangles[:, 0, :]
+ AC: NDArrayA = triangles[:, 2, :] - triangles[:, 0, :]
+ BC: NDArrayA = triangles[:, 2, :] - triangles[:, 1, :]
s_AB = -AB[:, 0] * triangles[:, 0, 1] + AB[:, 1] * triangles[:, 0, 0] >= 0
s_AC = -AC[:, 0] * triangles[:, 0, 1] + AC[:, 1] * triangles[:, 0, 0] >= 0
diff --git a/src/squidpy/tl/_var_by_distance.py b/src/squidpy/tl/_var_by_distance.py
index 64ad814b..e845c0c2 100644
--- a/src/squidpy/tl/_var_by_distance.py
+++ b/src/squidpy/tl/_var_by_distance.py
@@ -3,7 +3,7 @@
from collections.abc import Iterator
from functools import reduce
from itertools import product
-from typing import Any, Union
+from typing import Any, Union, cast
import numpy as np
import pandas as pd
@@ -67,7 +67,10 @@ def var_by_distance(
elif isinstance(groups, list):
anchor = groups
elif isinstance(groups, np.ndarray):
- anchor = groups.tolist()
+ # can't be a 2D array
+ if groups.ndim != 1:
+ raise ValueError(f"Expected a 1D array for 'groups', but got shape {groups.shape}.")
+ anchor = cast(list[str], groups.astype(str).tolist())
else:
raise TypeError(f"Invalid type for groups: {type(groups)}.")
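
The new branch rejects multi-dimensional arrays for `groups` and normalises a 1D array to a list of strings before it is used as the anchor list. A minimal sketch of that behaviour; `_normalise_groups` is an illustrative helper, not a squidpy function:

```python
import numpy as np


def _normalise_groups(groups: np.ndarray) -> list[str]:
    # illustrative helper mirroring the new check: only 1D arrays are accepted
    if groups.ndim != 1:
        raise ValueError(f"Expected a 1D array for 'groups', but got shape {groups.shape}.")
    return groups.astype(str).tolist()


print(_normalise_groups(np.array(["tumor", "stroma"])))  # ['tumor', 'stroma']
```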