From 4a034b8400c5354dc7a902cc3afcd608c4ae7bc9 Mon Sep 17 00:00:00 2001 From: Bouwe Andela Date: Thu, 16 May 2024 14:43:31 +0200 Subject: [PATCH 1/9] Enable type hint checking --- .pre-commit-config.yaml | 8 ++++ benchmarks/asv_delegated_conda.py | 5 ++- benchmarks/benchmarks/cube.py | 2 +- .../experimental/ugrid/regions_combine.py | 2 +- benchmarks/benchmarks/load/__init__.py | 12 ++--- benchmarks/bm_runner.py | 18 ++++---- lib/iris/__init__.py | 8 +++- lib/iris/_lazy_data.py | 3 +- lib/iris/analysis/__init__.py | 2 +- lib/iris/analysis/maths.py | 2 +- lib/iris/common/metadata.py | 4 +- lib/iris/common/resolve.py | 12 ++--- lib/iris/coord_systems.py | 2 +- lib/iris/coords.py | 4 +- lib/iris/cube.py | 19 ++++---- lib/iris/experimental/ugrid/cf.py | 2 +- lib/iris/experimental/ugrid/mesh.py | 2 +- lib/iris/experimental/ugrid/utils.py | 18 ++++---- .../fileformats/_nc_load_rules/helpers.py | 24 +++++----- lib/iris/fileformats/cf.py | 2 +- lib/iris/fileformats/netcdf/__init__.py | 4 +- lib/iris/fileformats/netcdf/_dask_locks.py | 10 ++--- lib/iris/fileformats/netcdf/loader.py | 17 ++++--- lib/iris/fileformats/netcdf/saver.py | 9 ++-- lib/iris/fileformats/rules.py | 2 +- lib/iris/pandas.py | 4 +- lib/iris/plot.py | 4 +- lib/iris/tests/__init__.py | 7 ++- lib/iris/tests/graphics/__init__.py | 4 +- .../integration/test_netcdf__loadsaveattrs.py | 43 ++++++++++-------- lib/iris/tests/stock/_stock_2d_latlons.py | 6 +-- lib/iris/tests/stock/netcdf.py | 4 +- lib/iris/tests/test_constraints.py | 44 +++++++++---------- lib/iris/tests/test_plot.py | 2 +- .../analysis/maths/test__arith__meshcoords.py | 2 +- .../unit/common/metadata/test_CubeMetadata.py | 16 ++++--- lib/iris/tests/unit/concatenate/__init__.py | 6 +-- .../unit/concatenate/test__CoordSignature.py | 14 +++--- .../unit/cube/test_Cube__aggregated_by.py | 7 ++- .../geovista/test_cube_to_polydata.py | 2 +- .../nc_load_rules/actions/__init__.py | 8 ++-- .../actions/test__latlon_dimcoords.py | 6 ++- .../actions/test__time_coords.py | 2 +- .../fileformats/netcdf/saver/test_Saver.py | 12 ++--- .../netcdf/saver/test_Saver__lazy.py | 4 +- .../saver/test_Saver__lazy_stream_data.py | 5 ++- lib/iris/tests/unit/merge/test_ProtoCube.py | 8 ++-- lib/iris/tests/unit/plot/test_pcolormesh.py | 4 +- pyproject.toml | 11 +++++ 49 files changed, 239 insertions(+), 179 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index fb33180953..c48c622b00 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -62,6 +62,14 @@ repos: - id: sort-all types: [file, python] +- repo: https://github.com/pre-commit/mirrors-mypy + rev: 'v1.9.0' + hooks: + - id: mypy + additional_dependencies: + - 'types-requests' + exclude: 'noxfile\.py|docs/src/conf\.py' + - repo: https://github.com/numpy/numpydoc rev: v1.7.0 hooks: diff --git a/benchmarks/asv_delegated_conda.py b/benchmarks/asv_delegated_conda.py index d53c111ab9..8cb084da1d 100644 --- a/benchmarks/asv_delegated_conda.py +++ b/benchmarks/asv_delegated_conda.py @@ -13,6 +13,7 @@ from pathlib import Path from shutil import copy2, copytree, rmtree from tempfile import TemporaryDirectory +from typing import Callable from asv import util as asv_util from asv.config import Config @@ -99,7 +100,7 @@ def name(self): def _update_info(self) -> None: """Make sure class properties reflect the actual environment being used.""" # Follow symlink if it has been created. 
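Reviewer note: the new `typing.Callable` import above supports the `func: Callable = copytree` change in the `copy_asv_files` hunk just below. Without the annotation, mypy infers the exact type of `copytree` from the first assignment and then rejects rebinding the name to `copy2`, whose signature differs. A minimal sketch of the pattern (the `pick_copier` name is illustrative, not from the patch):

    from shutil import copy2, copytree
    from typing import Callable

    def pick_copier(src_is_dir: bool) -> Callable:
        if src_is_dir:
            # Annotating the first assignment widens the inferred type,
            # so the other branch may bind a differently-shaped function.
            func: Callable = copytree
        else:
            func = copy2
        return func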
- actual_path = Path(self._path).resolve() + actual_path = Path(self._path).resolve() # type: ignore[has-type] self._path = str(actual_path) # Get custom environment's Python version if it exists yet. @@ -132,7 +133,7 @@ def copy_asv_files(src_parent: Path, dst_parent: Path) -> None: # happened. Also a non-issue when copying in the reverse # direction because the cache dir is temporary. if src_path.is_dir(): - func = copytree + func: Callable = copytree else: func = copy2 func(src_path, dst_path) diff --git a/benchmarks/benchmarks/cube.py b/benchmarks/benchmarks/cube.py index bc053e8301..2030547b46 100644 --- a/benchmarks/benchmarks/cube.py +++ b/benchmarks/benchmarks/cube.py @@ -55,7 +55,7 @@ def time_create(self, _, cube_creation_strategy: str) -> None: new_cube.attributes = self.cube_kwargs["attributes"] new_cube.cell_methods = self.cube_kwargs["cell_methods"] for coord, dims in self.cube_kwargs["dim_coords_and_dims"]: - coord: coords.DimCoord # Type hint to help linters. + assert isinstance(coord, coords.DimCoord) # Type hint to help linters. new_cube.add_dim_coord(coord, dims) for coord, dims in self.cube_kwargs["aux_coords_and_dims"]: new_cube.add_aux_coord(coord, dims) diff --git a/benchmarks/benchmarks/experimental/ugrid/regions_combine.py b/benchmarks/benchmarks/experimental/ugrid/regions_combine.py index 409d6961c3..e2cd89018e 100644 --- a/benchmarks/benchmarks/experimental/ugrid/regions_combine.py +++ b/benchmarks/benchmarks/experimental/ugrid/regions_combine.py @@ -214,7 +214,7 @@ def track_filesize_saved(self, n_cubesphere): return os.path.getsize("tmp.nc") * 1.0e-6 -CombineRegionsSaveData.track_filesize_saved.unit = "Mb" +CombineRegionsSaveData.track_filesize_saved.unit = "Mb" # type: ignore[attr-defined] class CombineRegionsFileStreamedCalc(MixinCombineRegions): diff --git a/benchmarks/benchmarks/load/__init__.py b/benchmarks/benchmarks/load/__init__.py index 80d80df384..a4dfb40d19 100644 --- a/benchmarks/benchmarks/load/__init__.py +++ b/benchmarks/benchmarks/load/__init__.py @@ -15,16 +15,16 @@ class LoadAndRealise: # For data generation timeout = 600.0 - params = [ + params = ( [(50, 50, 2), (1280, 960, 5), (2, 2, 1000)], [False, True], ["FF", "PP", "NetCDF"], - ] + ) param_names = ["xyz", "compressed", "file_format"] def setup_cache(self) -> dict: file_type_args = self.params[2] - file_path_dict = {} + file_path_dict: dict[tuple[int, int, int], dict[bool, dict[str, str]]] = {} for xyz in self.params[0]: file_path_dict[xyz] = {} x, y, z = xyz @@ -59,7 +59,7 @@ def time_realise(self, _, __, ___, ____) -> None: class STASHConstraint: # xyz sizes mimic LoadAndRealise to maximise file reuse. - params = [[(2, 2, 2), (1280, 960, 5), (2, 2, 1000)], ["FF", "PP"]] + params = ([(2, 2, 2), (1280, 960, 5), (2, 2, 1000)], ["FF", "PP"]) param_names = ["xyz", "file_format"] def setup_cache(self) -> dict: @@ -78,7 +78,7 @@ def time_stash_constraint(self, _, __, ___) -> None: class TimeConstraint: - params = [[3, 20], ["FF", "PP", "NetCDF"]] + params = ([3, 20], ["FF", "PP", "NetCDF"]) param_names = ["time_dim_len", "file_format"] def setup_cache(self) -> dict: @@ -139,7 +139,7 @@ class StructuredFF: avoiding the cost of merging. 
""" - params = [[(2, 2, 2), (1280, 960, 5), (2, 2, 1000)], [False, True]] + params = ([(2, 2, 2), (1280, 960, 5), (2, 2, 1000)], [False, True]) param_names = ["xyz", "structured_loading"] def setup_cache(self) -> dict: diff --git a/benchmarks/bm_runner.py b/benchmarks/bm_runner.py index 2c18de4a41..f5970c4968 100644 --- a/benchmarks/bm_runner.py +++ b/benchmarks/bm_runner.py @@ -129,7 +129,7 @@ def _setup_common() -> None: def _asv_compare(*commits: str, overnight_mode: bool = False) -> None: """Run through a list of commits comparing each one to the next.""" - commits = [commit[:8] for commit in commits] + commits = tuple(commit[:8] for commit in commits) for i in range(len(commits) - 1): before = commits[i] after = commits[i + 1] @@ -228,19 +228,19 @@ def _gh_create_reports(commit_sha: str, results_full: str, results_shifts: str) for login_type in ("author", "mergedBy"): gh_query = f'.["{login_type}"]["login"]' - command = shlex.split( + commandlist = shlex.split( f"gh pr view {pr_tag[1:]} " f"--json {login_type} -q '{gh_query}' " f"--repo {repo}" ) - login = _subprocess_runner_capture(command) + login = _subprocess_runner_capture(commandlist) - command = [ + commandlist = [ "curl", "-s", f"https://api.github.com/users/{login}", ] - login_info = _subprocess_runner_capture(command) + login_info = _subprocess_runner_capture(commandlist) is_user = '"type": "User"' in login_info if is_user: assignee = login @@ -306,7 +306,7 @@ class _SubParserGenerator(ABC): description: str = NotImplemented epilog: str = NotImplemented - def __init__(self, subparsers: ArgumentParser.add_subparsers) -> None: + def __init__(self, subparsers) -> None: self.subparser: ArgumentParser = subparsers.add_parser( self.name, description=self.description, @@ -469,10 +469,12 @@ def csperf(args: argparse.Namespace, run_type: Literal["cperf", "sperf"]) -> Non environ["ON_DEMAND_BENCHMARKS"] = "True" commit_range = "upstream/main^!" - asv_command = ASV_HARNESS.format(posargs=commit_range) + f" --bench={run_type}" + asv_command_str = ( + ASV_HARNESS.format(posargs=commit_range) + f" --bench={run_type}" + ) # Only do a single round. - asv_command = shlex.split(re.sub(r"rounds=\d", "rounds=1", asv_command)) + asv_command = shlex.split(re.sub(r"rounds=\d", "rounds=1", asv_command_str)) try: _subprocess_runner([*asv_command, *args.asv_args], asv=True) except subprocess.CalledProcessError as err: diff --git a/lib/iris/__init__.py b/lib/iris/__init__.py index f0421e9662..a06e36a2e2 100644 --- a/lib/iris/__init__.py +++ b/lib/iris/__init__.py @@ -94,6 +94,7 @@ def callback(cube, field, filename): import itertools import os.path import threading +from typing import Callable, Literal import iris._constraints import iris.config @@ -189,7 +190,7 @@ def __repr__(self): return msg.format(self.datum_support, self.pandas_ndim, self.save_split_attrs) # deprecated_options = {'example_future_flag': 'warning',} - deprecated_options = {} + deprecated_options: dict[str, Literal["error", "warning"]] = {} def __setattr__(self, name, value): if name in self.deprecated_options: @@ -248,7 +249,10 @@ def context(self, **kwargs): # Initialise the site configuration dictionary. #: Iris site configuration dictionary. 
-site_configuration = {} +site_configuration: dict[ + Literal["cf_profile", "cf_patch", "cf_patch_conventions"], + Callable | Literal[False] | None, +] = {} try: from iris.site_config import update as _update diff --git a/lib/iris/_lazy_data.py b/lib/iris/_lazy_data.py index b1f8e9aa85..cd093b315c 100644 --- a/lib/iris/_lazy_data.py +++ b/lib/iris/_lazy_data.py @@ -9,6 +9,7 @@ """ from functools import lru_cache, wraps +from types import ModuleType from typing import Sequence import dask @@ -376,7 +377,7 @@ def _combine( lazy = any(is_lazy_data(a) for a in arrays) masked = any(is_masked_data(a) for a in arrays) - array_module = np + array_module: ModuleType = np if masked: if lazy: # Avoid inconsistent array type when slicing resulting array diff --git a/lib/iris/analysis/__init__.py b/lib/iris/analysis/__init__.py index dc2a09d93e..5d44e563c0 100644 --- a/lib/iris/analysis/__init__.py +++ b/lib/iris/analysis/__init__.py @@ -1848,7 +1848,7 @@ def interp_order(length): and lazy data. """ -MAX_RUN.name = lambda: "max_run" +MAX_RUN.name = lambda: "max_run" # type: ignore[method-assign] GMEAN = Aggregator("geometric_mean", scipy.stats.mstats.gmean) diff --git a/lib/iris/analysis/maths.py b/lib/iris/analysis/maths.py index 80d3ead90c..1ac29dd2ed 100644 --- a/lib/iris/analysis/maths.py +++ b/lib/iris/analysis/maths.py @@ -563,7 +563,7 @@ def power(data): else: - def power(data, out=None): + def power(data, out=None): # type: ignore[misc] return np.power(data, exponent, out) return _math_op_common( diff --git a/lib/iris/common/metadata.py b/lib/iris/common/metadata.py index 9b0edf6532..e11ea71462 100644 --- a/lib/iris/common/metadata.py +++ b/lib/iris/common/metadata.py @@ -141,7 +141,7 @@ class BaseMetadata(metaclass=_NamedTupleMeta): DEFAULT_NAME = "unknown" # the fall-back name for metadata identity - _members = ( + _members: str | Iterable[str] = ( "standard_name", "long_name", "var_name", @@ -870,7 +870,7 @@ def equal(self, other, lenient=None): class CoordMetadata(BaseMetadata): """Metadata container for a :class:`~iris.coords.Coord`.""" - _members = ("coord_system", "climatological") + _members: str | Iterable[str] = ("coord_system", "climatological") __slots__ = () diff --git a/lib/iris/common/resolve.py b/lib/iris/common/resolve.py index aaf36b36cc..8b5f0cdc7f 100644 --- a/lib/iris/common/resolve.py +++ b/lib/iris/common/resolve.py @@ -31,7 +31,7 @@ _AuxCoverage = namedtuple( - "AuxCoverage", + "_AuxCoverage", [ "cube", "common_items_aux", @@ -45,18 +45,18 @@ ) _CategoryItems = namedtuple( - "CategoryItems", + "_CategoryItems", ["items_dim", "items_aux", "items_scalar"], ) _DimCoverage = namedtuple( - "DimCoverage", + "_DimCoverage", ["cube", "metadata", "coords", "dims_common", "dims_local", "dims_free"], ) -_Item = namedtuple("Item", ["metadata", "coord", "dims"]) +_Item = namedtuple("_Item", ["metadata", "coord", "dims"]) -_PreparedFactory = namedtuple("PreparedFactory", ["container", "dependencies"]) +_PreparedFactory = namedtuple("_PreparedFactory", ["container", "dependencies"]) @dataclass @@ -95,7 +95,7 @@ def create_coord(self, metadata): return result -_PreparedMetadata = namedtuple("PreparedMetadata", ["combined", "src", "tgt"]) +_PreparedMetadata = namedtuple("_PreparedMetadata", ["combined", "src", "tgt"]) class Resolve: diff --git a/lib/iris/coord_systems.py b/lib/iris/coord_systems.py index 96f39c3f4b..d96217bd92 100644 --- a/lib/iris/coord_systems.py +++ b/lib/iris/coord_systems.py @@ -48,7 +48,7 @@ def _float_or_None(arg): class CoordSystem(metaclass=ABCMeta): """Abstract 
base class for coordinate systems.""" - grid_mapping_name = None + grid_mapping_name: str | None = None def __eq__(self, other): """Override equality. diff --git a/lib/iris/coords.py b/lib/iris/coords.py index e32c6b0bf0..1acc7ae7e4 100644 --- a/lib/iris/coords.py +++ b/lib/iris/coords.py @@ -2704,7 +2704,7 @@ def _new_points_requirements(self, points): emsg = "The {!r} {} points array must be strictly monotonic." raise ValueError(emsg.format(self.name(), self.__class__.__name__)) - @Coord._values.setter + @Coord._values.setter # type: ignore[attr-defined] def _values(self, points): # DimCoord always realises the points, to allow monotonicity checks. # Ensure it is an actual array, and also make our own copy so that we @@ -2796,7 +2796,7 @@ def _new_bounds_requirements(self, bounds): return bounds - @Coord.bounds.setter + @Coord.bounds.setter # type: ignore[attr-defined] def bounds(self, bounds): if bounds is not None: # Ensure we have a realised array of new bounds values. diff --git a/lib/iris/cube.py b/lib/iris/cube.py index 8418a630b5..95c6d2d49f 100644 --- a/lib/iris/cube.py +++ b/lib/iris/cube.py @@ -18,7 +18,6 @@ Mapping, MutableMapping, Optional, - Union, ) import warnings from xml.dom.minidom import Document @@ -789,7 +788,7 @@ class CubeAttrsDict(MutableMapping): def __init__( self, - combined: Optional[Union[Mapping, str]] = "__unspecified", + combined: Optional[Mapping] = None, locals: Optional[Mapping] = None, globals: Optional[Mapping] = None, ): @@ -837,10 +836,11 @@ def __init__( """ # First initialise locals + globals, defaulting to empty. - self.locals = locals - self.globals = globals + # See https://github.com/python/mypy/issues/3004 + self.locals = locals # type: ignore[assignment] + self.globals = globals # type: ignore[assignment] # Update with combined, if present. - if not isinstance(combined, str) or combined != "__unspecified": + if combined is not None: # Treat a single input with 'locals' and 'globals' properties as an # existing CubeAttrsDict, and update from its content. # N.B. enforce deep copying, consistent with general Iris usage. 
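Reviewer note: the `CubeAttrsDict.__init__` hunk above replaces the `"__unspecified"` string sentinel with `None`, which shrinks the signature from `Optional[Union[Mapping, str]]` to `Optional[Mapping]` and drops the isinstance/string-comparison test. A sketch of the shape of that change, using an illustrative stand-in rather than the real `CubeAttrsDict`:

    from collections.abc import Mapping
    from typing import Optional

    class AttrsDictDemo(dict):
        """Illustrative stand-in, not the real CubeAttrsDict."""

        def __init__(self, combined: Optional[Mapping] = None) -> None:
            super().__init__()
            if combined is not None:
                # A None sentinel needs no Mapping/str union and no
                # magic-string comparison.
                self.update(combined)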
@@ -3958,14 +3958,16 @@ def __ne__(self, other): def __hash__(self): return hash(id(self)) - __add__ = iris.analysis.maths.add + def __add__(self, other): + return iris.analysis.maths.add(self, other) def __iadd__(self, other): return iris.analysis.maths.add(self, other, in_place=True) __radd__ = __add__ - __sub__ = iris.analysis.maths.subtract + def __sub__(self, other): + return iris.analysis.maths.subtract(self, other) def __isub__(self, other): return iris.analysis.maths.subtract(self, other, in_place=True) @@ -3973,7 +3975,8 @@ def __isub__(self, other): def __rsub__(self, other): return (-self) + other - __mul__ = iris.analysis.maths.multiply + def __mul__(self, other): + return iris.analysis.maths.multiply(self, other) def __imul__(self, other): return iris.analysis.maths.multiply(self, other, in_place=True) diff --git a/lib/iris/experimental/ugrid/cf.py b/lib/iris/experimental/ugrid/cf.py index 9a56045e67..281bdba878 100644 --- a/lib/iris/experimental/ugrid/cf.py +++ b/lib/iris/experimental/ugrid/cf.py @@ -283,7 +283,7 @@ class CFUGridReader(cf.CFReader): """ - _variable_types = cf.CFReader._variable_types + ( + _variable_types = cf.CFReader._variable_types + ( # type: ignore[assignment] CFUGridConnectivityVariable, CFUGridAuxiliaryCoordinateVariable, CFUGridMeshVariable, diff --git a/lib/iris/experimental/ugrid/mesh.py b/lib/iris/experimental/ugrid/mesh.py index a798f7af77..6fb1800b60 100644 --- a/lib/iris/experimental/ugrid/mesh.py +++ b/lib/iris/experimental/ugrid/mesh.py @@ -1974,7 +1974,7 @@ class _Mesh1DCoordinateManager: "node_x", "node_y", ) - OPTIONAL = ( + OPTIONAL: tuple[str, ...] = ( "edge_x", "edge_y", ) diff --git a/lib/iris/experimental/ugrid/utils.py b/lib/iris/experimental/ugrid/utils.py index dcf5462ad5..e86b003dc2 100644 --- a/lib/iris/experimental/ugrid/utils.py +++ b/lib/iris/experimental/ugrid/utils.py @@ -5,7 +5,8 @@ """Utility operations specific to unstructured data.""" -from typing import AnyStr, Iterable, Union +from collections.abc import Sequence +from typing import Union import dask.array as da import numpy as np @@ -15,8 +16,8 @@ def recombine_submeshes( mesh_cube: Cube, - submesh_cubes: Union[Iterable[Cube], Cube], - index_coord_name: AnyStr = "i_mesh_index", + submesh_cubes: Union[Sequence[Cube], Cube], + index_coord_name: str = "i_mesh_index", ) -> Cube: """Put data from sub-meshes back onto the original full mesh. @@ -51,7 +52,7 @@ def recombine_submeshes( its location in the original mesh -- i.e. they are indices into the relevant mesh-location dimension. - index_coord_name : Cube + index_coord_name : str Coord name of an index coord containing the mesh location indices, in every submesh cube. @@ -86,7 +87,7 @@ def recombine_submeshes( # # Perform consistency checks on all the region-cubes. # - if not isinstance(submesh_cubes, Iterable): + if not isinstance(submesh_cubes, Sequence): # Treat a single submesh cube input as a list-of-1. submesh_cubes = [submesh_cubes] @@ -94,11 +95,11 @@ def recombine_submeshes( result_dtype = None indexcoord_metadata = None for i_sub, cube in enumerate(submesh_cubes): - sub_str = f"Submesh cube #{i_sub + 1}/{len(submesh_cubes)}, " f'"{cube.name()}"' + sub_str = f'Submesh cube #{i_sub + 1}/{len(submesh_cubes)}, "{cube.name()}"' # Check dimensionality. if cube.ndim != mesh_cube.ndim: - err = ( + err: str | None = ( f"{sub_str} has {cube.ndim} dimensions, but " f"'mesh_cube' has {mesh_cube.ndim}." 
) @@ -196,7 +197,8 @@ def recombine_submeshes( if indexcoord_metadata is None: # Store first occurrence (from first region-cube) indexcoord_metadata = sub_metadata - elif sub_metadata != indexcoord_metadata: + elif sub_metadata != indexcoord_metadata: # type: ignore[unreachable] + # This code is unreachable, is this a bug? # Compare subsequent occurrences (from other region-cubes) err = ( f"{sub_str} has an index coord " diff --git a/lib/iris/fileformats/_nc_load_rules/helpers.py b/lib/iris/fileformats/_nc_load_rules/helpers.py index 43eed96fd5..7a7424d4f7 100644 --- a/lib/iris/fileformats/_nc_load_rules/helpers.py +++ b/lib/iris/fileformats/_nc_load_rules/helpers.py @@ -41,7 +41,6 @@ if TYPE_CHECKING: from numpy.ma import MaskedArray - from numpy.typing import ArrayLike from iris.fileformats.cf import CFBoundaryVariable @@ -1037,7 +1036,9 @@ def reorder_bounds_data(bounds_data, cf_bounds_var, cf_coord_var): ################################################################################ def _normalise_bounds_units( - points_units: str, cf_bounds_var: CFBoundaryVariable, bounds_data: ArrayLike + points_units: str | None, + cf_bounds_var: CFBoundaryVariable, + bounds_data: MaskedArray, ) -> Optional[MaskedArray]: """Ensure bounds have units compatible with points. @@ -1064,26 +1065,27 @@ def _normalise_bounds_units( """ bounds_units = get_attr_units(cf_bounds_var, {}) + result: MaskedArray | None = bounds_data if bounds_units != UNKNOWN_UNIT_STRING: - points_units = cf_units.Unit(points_units) - bounds_units = cf_units.Unit(bounds_units) + p_units = cf_units.Unit(points_units) + b_units = cf_units.Unit(bounds_units) - if bounds_units != points_units: - if bounds_units.is_convertible(points_units): - bounds_data = bounds_units.convert(bounds_data, points_units) + if b_units != p_units: + if b_units.is_convertible(p_units): + result = b_units.convert(bounds_data, p_units) else: wmsg = ( f"Ignoring bounds on NetCDF variable {cf_bounds_var.cf_name!r}. " - f"Expected units compatible with {points_units.origin!r}, got " - f"{bounds_units.origin!r}." + f"Expected units compatible with {p_units.origin!r}, got " + f"{b_units.origin!r}." ) warnings.warn( wmsg, category=iris.warnings.IrisCfLoadWarning, stacklevel=2 ) - bounds_data = None + result = None - return bounds_data + return result ################################################################################ diff --git a/lib/iris/fileformats/cf.py b/lib/iris/fileformats/cf.py index 0dc505d522..1017e19315 100644 --- a/lib/iris/fileformats/cf.py +++ b/lib/iris/fileformats/cf.py @@ -71,7 +71,7 @@ class CFVariable(metaclass=ABCMeta): #: Name of the netCDF variable attribute that identifies this #: CF-netCDF variable. - cf_identity = None + cf_identity: str | None = None def __init__(self, name, data): # Accessing the list of netCDF attributes is surprisingly slow. diff --git a/lib/iris/fileformats/netcdf/__init__.py b/lib/iris/fileformats/netcdf/__init__.py index e92b0ed4f8..6de6778416 100644 --- a/lib/iris/fileformats/netcdf/__init__.py +++ b/lib/iris/fileformats/netcdf/__init__.py @@ -11,10 +11,12 @@ """ +import logging + import iris.config # Note: *must* be done before importing from submodules, as they also use this ! -logger = iris.config.get_logger(__name__) +logger: logging.Logger = iris.config.get_logger(__name__) # Note: these probably shouldn't be public, but for now they are. 
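Reviewer note: `cf_identity: str | None = None` above (like `grid_mapping_name` earlier) is annotated because mypy would otherwise infer the attribute type as `None` from the default value, making every subclass that assigns a real string fail to type-check. A minimal sketch with illustrative class names:

    class CFVariableDemo:
        # Without the annotation, mypy infers `None` here and rejects
        # the subclass assignment below.
        cf_identity: str | None = None

    class CFCoordinateDemo(CFVariableDemo):
        cf_identity = "coordinates"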
from .._nc_load_rules.helpers import UnknownCellMethodWarning, parse_cell_methods diff --git a/lib/iris/fileformats/netcdf/_dask_locks.py b/lib/iris/fileformats/netcdf/_dask_locks.py index 64d094e060..b677f1af28 100644 --- a/lib/iris/fileformats/netcdf/_dask_locks.py +++ b/lib/iris/fileformats/netcdf/_dask_locks.py @@ -127,14 +127,14 @@ def get_worker_lock(identity: str): """ scheduler_type = get_dask_array_scheduler_type() - if scheduler_type in ("threads", "single-threaded"): + if scheduler_type == "distributed": + from dask.distributed import Lock as DistributedLock + + lock: DistributedLock | threading.Lock = DistributedLock(identity) + elif scheduler_type in ("threads", "single-threaded"): # N.B. the "identity" string is never used in this case, as the same actual # lock object is used by all workers. lock = threading.Lock() - elif scheduler_type == "distributed": - from dask.distributed import Lock as DistributedLock - - lock = DistributedLock(identity) else: msg = ( "The configured dask array scheduler type is " diff --git a/lib/iris/fileformats/netcdf/loader.py b/lib/iris/fileformats/netcdf/loader.py index 95cbc120f5..aa973aaecc 100644 --- a/lib/iris/fileformats/netcdf/loader.py +++ b/lib/iris/fileformats/netcdf/loader.py @@ -16,7 +16,7 @@ from copy import deepcopy from enum import Enum, auto import threading -from typing import Union +from typing import Iterator import warnings import numpy as np @@ -705,9 +705,9 @@ def __init__(self, var_dim_chunksizes=None): @contextmanager def set( self, - var_names: Union[str, Iterable[str]] = None, + var_names: str | Iterable[str] | None = None, **dimension_chunksizes: Mapping[str, int], - ) -> None: + ) -> Iterator[None]: r"""Control the Dask chunk sizes applied to NetCDF variables during loading. Parameters @@ -758,14 +758,17 @@ def set( # A specific name match should override a '*' setting, but # that is implemented elsewhere. if not isinstance(var_name, str): - msg = ( + msg = ( # type: ignore[unreachable] "'var_names' should be an iterable of strings, " f"not {var_names!r}." ) raise ValueError(msg) dim_chunks = self.var_dim_chunksizes.setdefault(var_name, {}) for dim_name, chunksize in dimension_chunksizes.items(): - if not (isinstance(dim_name, str) and isinstance(chunksize, int)): + if not ( + isinstance(dim_name, str) # type: ignore[redundant-expr] + and isinstance(chunksize, int) + ): msg = ( "'dimension_chunksizes' kwargs should be a dict " f"of `str: int` pairs, not {dimension_chunksizes!r}." @@ -778,7 +781,7 @@ def set( self.mode = old_mode @contextmanager - def from_file(self) -> None: + def from_file(self) -> Iterator[None]: r"""Ensure the chunk sizes are loaded in from NetCDF file variables. Raises @@ -801,7 +804,7 @@ def from_file(self) -> None: self.var_dim_chunksizes = old_var_dim_chunksizes @contextmanager - def as_dask(self) -> None: + def as_dask(self) -> Iterator[None]: """Rely on Dask :external+dask:doc:`array` to control chunk sizes. Notes diff --git a/lib/iris/fileformats/netcdf/saver.py b/lib/iris/fileformats/netcdf/saver.py index 8d53a4d5be..00c91cf087 100644 --- a/lib/iris/fileformats/netcdf/saver.py +++ b/lib/iris/fileformats/netcdf/saver.py @@ -290,6 +290,7 @@ class VariableEmulator(typing.Protocol): """ _data_array: np.typing.ArrayLike + shape: tuple[int, ...] CFVariable = typing.Union[_thread_safe_nc.VariableWrapper, VariableEmulator] @@ -953,8 +954,8 @@ def _add_aux_coords(self, cube, cf_var_cube, dimension_names): ] # Include any relevant mesh location coordinates. 
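Reviewer note: the `loader.py` context managers above are re-annotated from `-> None` to `-> Iterator[None]` because a function decorated with `@contextmanager` is a generator, and the declared return type describes the generator, not the resulting context manager. A minimal sketch:

    from contextlib import contextmanager
    from typing import Iterator

    @contextmanager
    def chunk_control_demo() -> Iterator[None]:
        # The generator yields exactly once; Iterator[None] is the
        # type @contextmanager expects to wrap.
        try:
            yield
        finally:
            pass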
- mesh: Mesh = getattr(cube, "mesh") - mesh_location: str = getattr(cube, "location") + mesh: Mesh | None = getattr(cube, "mesh") + mesh_location: str | None = getattr(cube, "location") if mesh and mesh_location: location_coords: MeshNodeCoords | MeshEdgeCoords | MeshFaceCoords = getattr( mesh, f"{mesh_location}_coords" @@ -2290,7 +2291,7 @@ def _increment_name(self, varname): def _lazy_stream_data( self, - data: np.typing.ArrayLike, + data: np.ndarray | da.Array, cf_var: CFVariable, ) -> None: if hasattr(data, "shape") and data.shape == (1,) + cf_var.shape: @@ -2334,7 +2335,7 @@ def store( data: np.typing.ArrayLike, cf_var: CFVariable, ) -> None: - cf_var[:] = data + cf_var[:] = data # type: ignore[index] # Store the data. store(data, cf_var) diff --git a/lib/iris/fileformats/rules.py b/lib/iris/fileformats/rules.py index be04f0bb5d..c467c76f4b 100644 --- a/lib/iris/fileformats/rules.py +++ b/lib/iris/fileformats/rules.py @@ -254,7 +254,7 @@ def _ensure_aligned(regrid_cache, src_cube, target_cube): _loader_attrs = ("field_generator", "field_generator_kwargs", "converter") -class Loader(collections.namedtuple("Loader", _loader_attrs)): +class Loader(collections.namedtuple("Loader", _loader_attrs)): # type: ignore[misc] def __new__(cls, field_generator, field_generator_kwargs, converter): """Create a definition of a field-based Cube loader. diff --git a/lib/iris/pandas.py b/lib/iris/pandas.py index 434e415f2d..dd0f9fec89 100644 --- a/lib/iris/pandas.py +++ b/lib/iris/pandas.py @@ -88,7 +88,7 @@ def _add_iris_coord(cube, name, points, dim, calendar=None): cube.add_aux_coord(coord, dim) -def _series_index_unique(pandas_series: pd.Series): +def _series_index_unique(pandas_series: pd.Series) -> tuple[int, ...] | None: """Find an index grouping of a :class:`pandas.Series` that has just one Series value per group. Iterates through grouping single index levels, then combinations of 2 @@ -104,7 +104,7 @@ def _series_index_unique(pandas_series: pd.Series): levels_range = range(pandas_index.nlevels) if unique_number == 1: # Scalar - identical for all indices. - result = () + result: tuple[int, ...] 
| None = () else: result = None levels_combinations = chain( diff --git a/lib/iris/plot.py b/lib/iris/plot.py index a0c5f55274..f787ad4326 100644 --- a/lib/iris/plot.py +++ b/lib/iris/plot.py @@ -213,9 +213,7 @@ def sort_key(coord): def _can_draw_map(coords): - std_names = [ - c and c.standard_name for c in coords if isinstance(c, iris.coords.Coord) - ] + std_names = [c.standard_name for c in coords if isinstance(c, iris.coords.Coord)] valid_std_names = [ ["latitude", "longitude"], ["grid_latitude", "grid_longitude"], diff --git a/lib/iris/tests/__init__.py b/lib/iris/tests/__init__.py index 77a9fcdd67..0a433b00d3 100644 --- a/lib/iris/tests/__init__.py +++ b/lib/iris/tests/__init__.py @@ -29,7 +29,6 @@ import shutil import subprocess import sys -from typing import AnyStr import unittest from unittest import mock import warnings @@ -92,7 +91,7 @@ sys.argv.remove("--data-files-used") fname = "/var/tmp/all_iris_test_resource_paths.txt" print("saving list of files used by tests to %s" % fname) - _EXPORT_DATAPATHS_FILE = open(fname, "w") + _EXPORT_DATAPATHS_FILE: io.TextIOWrapper | None = open(fname, "w") else: _EXPORT_DATAPATHS_FILE = None @@ -196,7 +195,7 @@ def assert_masked_array_almost_equal(a, b, decimal=6, strict=False): class IrisTest(unittest.TestCase): """A subclass of unittest.TestCase which provides Iris specific testing functionality.""" - _assertion_counts = collections.defaultdict(int) + _assertion_counts: collections.defaultdict[str, int] = collections.defaultdict(int) def _assert_str_same( self, @@ -1053,7 +1052,7 @@ def wrapped(self, *args, **kwargs): return wrapped -def env_bin_path(exe_name: AnyStr = None): +def env_bin_path(exe_name: str | None = None) -> Path | None: """Return a Path object for (an executable in) the environment bin directory. Parameters diff --git a/lib/iris/tests/graphics/__init__.py b/lib/iris/tests/graphics/__init__.py index a1b6b24bcc..1fe199c8b7 100644 --- a/lib/iris/tests/graphics/__init__.py +++ b/lib/iris/tests/graphics/__init__.py @@ -126,7 +126,7 @@ def repos_equal(repo1: Dict[str, str], repo2: Dict[str, str]) -> bool: return True -def get_phash(input: Path) -> str: +def get_phash(input: Path | io.BytesIO) -> str: import imagehash from PIL import Image @@ -141,7 +141,7 @@ def generate_repo_from_baselines(baseline_image_dir: Path) -> Dict[str, str]: return repo -def fully_qualify(test_id: str, repo: str) -> Dict[str, str]: +def fully_qualify(test_id: str, repo: dict[str, str]) -> str: # If the test_id isn't in the repo as it stands, look for it if test_id not in repo: test_id_candidates = [x for x in repo.keys() if x.endswith(test_id)] diff --git a/lib/iris/tests/integration/test_netcdf__loadsaveattrs.py b/lib/iris/tests/integration/test_netcdf__loadsaveattrs.py index 991f0431a1..1eeb5c4f0e 100644 --- a/lib/iris/tests/integration/test_netcdf__loadsaveattrs.py +++ b/lib/iris/tests/integration/test_netcdf__loadsaveattrs.py @@ -18,6 +18,7 @@ """ +from collections.abc import Sequence import inspect import json import os @@ -47,12 +48,11 @@ # A list of "global-style" attribute names : those which should be global attributes by # default (i.e. file- or group-level, *not* attached to a variable). -_GLOBAL_TEST_ATTRS = set(iris.fileformats.netcdf.saver._CF_GLOBAL_ATTRS) +_GLOBAL_TEST_ATTRS = sorted(iris.fileformats.netcdf.saver._CF_GLOBAL_ATTRS) # Remove this one, which has peculiar behaviour + is tested separately # N.B. this is not the same as 'Conventions', but is caught in the crossfire when that # one is processed. 
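Reviewer note: the `_EXPORT_DATAPATHS_FILE` change above annotates the first branch of a conditional assignment so mypy sees the intended `io.TextIOWrapper | None` union instead of inferring a narrower type from one branch alone. A minimal sketch (the file path and flag are illustrative):

    import io
    import sys

    export_file: io.TextIOWrapper | None = None
    if "--data-files-used" in sys.argv:
        # open() in text mode returns a TextIOWrapper, matching the union.
        export_file = open("/tmp/paths_demo.txt", "w")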
-_GLOBAL_TEST_ATTRS -= set(["conventions"]) -_GLOBAL_TEST_ATTRS = sorted(_GLOBAL_TEST_ATTRS) +_GLOBAL_TEST_ATTRS = [a for a in _GLOBAL_TEST_ATTRS if a != "conventions"] # Define a fixture to parametrise tests over the 'global-style' test attributes. @@ -95,7 +95,7 @@ def local_attr(request): def check_captured_warnings( - expected_keys: List[str], + expected_keys: List[str] | str | None, captured_warnings: List[warnings.WarningMessage], allow_possible_legacy_warning: bool = False, ): @@ -109,13 +109,14 @@ def check_captured_warnings( # TODO: when iris.FUTURE.save_split_attrs is removed, we can remove the # 'allow_possible_legacy_warning' arg. + if expected_keys == _SKIP_WARNCHECK: + # No check at all in this case + return + if expected_keys is None: expected_keys = [] - elif hasattr(expected_keys, "upper"): + elif isinstance(expected_keys, str): # Handle a single string - if expected_keys == _SKIP_WARNCHECK: - # No check at all in this case - return expected_keys = [expected_keys] if allow_possible_legacy_warning: @@ -126,14 +127,14 @@ def check_captured_warnings( ) expected_keys.append(legacy_message_key) - expected_keys = [re.compile(key) for key in expected_keys] + expected_patterns = [re.compile(key) for key in expected_keys] found_results = [str(warning.message) for warning in captured_warnings] - remaining_keys = expected_keys.copy() + remaining_keys = expected_patterns.copy() for i_message, message in enumerate(found_results.copy()): for key in remaining_keys: if key.search(message): # Hit : replace one message in the list with its matching "key" - found_results[i_message] = key + found_results[i_message] = key # type: ignore[call-overload] # remove the matching key remaining_keys.remove(key) # skip on to next message @@ -142,9 +143,11 @@ def check_captured_warnings( if allow_possible_legacy_warning: # Remove any unused "legacy attribute saving" key. # N.B. this is the *only* key we will tolerate not being used. - expected_keys = [key for key in expected_keys if key != legacy_message_key] + expected_patterns = [ + key for key in expected_patterns if key != legacy_message_key + ] - assert set(found_results) == set(expected_keys) + assert set(found_results) == set(expected_patterns) class MixinAttrsTesting: @@ -361,8 +364,8 @@ def run_testcase( def fetch_results( self, - filepath: str = None, - cubes: Iterable[Cube] = None, + filepath: str | None = None, + cubes: Iterable[Cube] | None = None, oldstyle_combined: bool = False, ): """Return testcase results from an output file or cubes in a standardised form. @@ -561,11 +564,11 @@ def decode_specstring(spec: str) -> List[Union[str, None]]: return result -def encode_matrix_result(results: List[List[str]]) -> List[str]: +def encode_matrix_result(results) -> List[str]: # Re-code a set of output results, [*[global-value, *local-values]] as a list of # strings, like ["GaL-b"] or ["GaLabc", "GbLabc"]. # N.B. again assuming that all values are just one-character strings, or None. - assert isinstance(results, Iterable) and len(results) >= 1 + assert isinstance(results, Sequence) and len(results) >= 1 if not isinstance(results[0], list): results = [results] assert all( @@ -1345,7 +1348,7 @@ def run_save_testcase(self, attr_name: str, values: list): self.captured_warnings = captured_warnings - def run_save_testcase_legacytype(self, attr_name: str, values: list): + def run_save_testcase_legacytype(self, attr_name: str, values): """Legacy-type means : before cubes had split attributes. 
This just means we have only one "set" of cubes, with ***no*** distinct global @@ -1357,7 +1360,9 @@ def run_save_testcase_legacytype(self, attr_name: str, values: list): self.run_save_testcase(attr_name, [None] + values) - def check_save_results(self, expected: list, expected_warnings: List[str] = None): + def check_save_results( + self, expected: list, expected_warnings: List[str] | None = None + ): results = self.fetch_results(filepath=self.result_filepath) assert results == expected check_captured_warnings( diff --git a/lib/iris/tests/stock/_stock_2d_latlons.py b/lib/iris/tests/stock/_stock_2d_latlons.py index c216629fcd..018b64a03a 100644 --- a/lib/iris/tests/stock/_stock_2d_latlons.py +++ b/lib/iris/tests/stock/_stock_2d_latlons.py @@ -305,9 +305,9 @@ def make_bounds_discontiguous_at_point(cube, at_iy, at_ix, in_y=False, upper=Tru x_coord = cube.coord(axis="x") y_coord = cube.coord(axis="y") assert x_coord.shape == y_coord.shape - assert ( - coord.bounds.ndim == 3 and coord.shape[-1] == 4 for coord in (x_coord, y_coord) - ) + for coord in (x_coord, y_coord): + assert coord.bounds.ndim == 3 + assert coord.bounds.shape[-1] == 4 # For both X and Y coord, move points + bounds to create a discontinuity. def adjust_coord(coord): diff --git a/lib/iris/tests/stock/netcdf.py b/lib/iris/tests/stock/netcdf.py index 5721433103..c063f3af23 100644 --- a/lib/iris/tests/stock/netcdf.py +++ b/lib/iris/tests/stock/netcdf.py @@ -7,7 +7,7 @@ from pathlib import Path from string import Template import subprocess -from typing import Optional +from typing import Any, Optional import dask from dask import array as da @@ -48,7 +48,7 @@ def ncgen_from_cdl(cdl_str: Optional[str], cdl_path: Optional[str], nc_path: str if cdl_path: # Create netcdf from stored CDL file. call_args = [NCGEN_PATHSTR, "-k3", "-o", nc_path, cdl_path] - call_kwargs = {} + call_kwargs: dict[str, Any] = {} else: # No CDL file : pipe 'cdl_str' directly into the ncgen program. 
if not cdl_str: diff --git a/lib/iris/tests/test_constraints.py b/lib/iris/tests/test_constraints.py index f7dab288b6..2e203e0185 100644 --- a/lib/iris/tests/test_constraints.py +++ b/lib/iris/tests/test_constraints.py @@ -152,7 +152,7 @@ def fixup_sigma_to_be_aux(cubes): sigma = iris.coords.AuxCoord.from_coord(sigma) cube.replace_coord(sigma) - def assertCML(self, cubes, filename): + def assertConstraintCML(self, cubes, filename): filename = "%s_%s.cml" % (filename, self.suffix) tests.IrisTest.assertCML(self, cubes, ("constrained_load", filename)) @@ -162,15 +162,15 @@ def load_match(self, files, constraints): def test_single_atomic_constraint(self): cubes = self.load_match(self.dec_path, self.level_10) self.fixup_sigma_to_be_aux(cubes) - self.assertCML(cubes, "all_10") + self.assertConstraintCML(cubes, "all_10") cubes = self.load_match(self.dec_path, self.theta) - self.assertCML(cubes, "theta") + self.assertConstraintCML(cubes, "theta") cubes = self.load_match(self.dec_path, self.model_level_number_10_22) self.fixup_sigma_to_be_aux(cubes) workaround_pending_1262(cubes) - self.assertCML(cubes, "all_ml_10_22") + self.assertConstraintCML(cubes, "all_ml_10_22") # Check that it didn't matter that we provided sets & tuples to the model_level for constraint in [ @@ -180,52 +180,52 @@ def test_single_atomic_constraint(self): cubes = self.load_match(self.dec_path, constraint) self.fixup_sigma_to_be_aux(cubes) workaround_pending_1262(cubes) - self.assertCML(cubes, "all_ml_10_22") + self.assertConstraintCML(cubes, "all_ml_10_22") def test_string_standard_name(self): cubes = self.load_match(self.dec_path, SN_AIR_POTENTIAL_TEMPERATURE) - self.assertCML(cubes, "theta") + self.assertConstraintCML(cubes, "theta") cubes = self.load_match(self.dec_path, [SN_AIR_POTENTIAL_TEMPERATURE]) - self.assertCML(cubes, "theta") + self.assertConstraintCML(cubes, "theta") cubes = self.load_match( self.dec_path, iris.Constraint(SN_AIR_POTENTIAL_TEMPERATURE) ) - self.assertCML(cubes, "theta") + self.assertConstraintCML(cubes, "theta") cubes = self.load_match( self.dec_path, iris.Constraint(SN_AIR_POTENTIAL_TEMPERATURE, model_level_number=10), ) self.fixup_sigma_to_be_aux(cubes) - self.assertCML(cubes, "theta_10") + self.assertConstraintCML(cubes, "theta_10") def test_latitude_constraint(self): cubes = self.load_match(self.theta_path, self.lat_30) - self.assertCML(cubes, "theta_lat_30") + self.assertConstraintCML(cubes, "theta_lat_30") cubes = self.load_match(self.theta_path, self.lat_gt_45) - self.assertCML(cubes, "theta_lat_gt_30") + self.assertConstraintCML(cubes, "theta_lat_gt_30") def test_single_expression_constraint(self): cubes = self.load_match(self.theta_path, self.theta & self.level_10) self.fixup_sigma_to_be_aux(cubes) - self.assertCML(cubes, "theta_10") + self.assertConstraintCML(cubes, "theta_10") cubes = self.load_match(self.theta_path, self.level_10 & self.theta) self.fixup_sigma_to_be_aux(cubes) - self.assertCML(cubes, "theta_10") + self.assertConstraintCML(cubes, "theta_10") def test_dual_atomic_constraint(self): cubes = self.load_match(self.dec_path, [self.theta, self.level_10]) self.fixup_sigma_to_be_aux(cubes) - self.assertCML(cubes, "theta_and_all_10") + self.assertConstraintCML(cubes, "theta_and_all_10") def test_dual_repeated_constraint(self): cubes = self.load_match(self.dec_path, [self.theta, self.theta]) self.fixup_sigma_to_be_aux(cubes) - self.assertCML(cubes, "theta_and_theta") + self.assertConstraintCML(cubes, "theta_and_theta") def test_dual_expression_constraint(self): cubes = 
self.load_match( @@ -233,28 +233,28 @@ def test_dual_expression_constraint(self): [self.theta & self.level_10, self.level_gt_30_le_3 & self.theta], ) self.fixup_sigma_to_be_aux(cubes) - self.assertCML(cubes, "theta_10_and_theta_level_gt_30_le_3") + self.assertConstraintCML(cubes, "theta_10_and_theta_level_gt_30_le_3") def test_invalid_constraint(self): cubes = self.load_match(self.theta_path, self.pressure_950) - self.assertCML(cubes, "pressure_950") + self.assertConstraintCML(cubes, "pressure_950") cubes = self.load_match(self.theta_path, self.invalid_inequality) - self.assertCML(cubes, "invalid_inequality") + self.assertConstraintCML(cubes, "invalid_inequality") def test_inequality_constraint(self): cubes = self.load_match(self.theta_path, self.level_gt_30_le_3) - self.assertCML(cubes, "theta_gt_30_le_3") + self.assertConstraintCML(cubes, "theta_gt_30_le_3") class StrictConstraintMixin(RelaxedConstraintMixin): def test_single_atomic_constraint(self): cubes = self.load_match(self.theta_path, self.theta) - self.assertCML(cubes, "theta") + self.assertConstraintCML(cubes, "theta") cubes = self.load_match(self.theta_path, self.level_10) self.fixup_sigma_to_be_aux(cubes) - self.assertCML(cubes, "theta_10") + self.assertConstraintCML(cubes, "theta_10") def test_invalid_constraint(self): with self.assertRaises(iris.exceptions.ConstraintMismatchError): @@ -263,7 +263,7 @@ def test_invalid_constraint(self): def test_dual_atomic_constraint(self): cubes = self.load_match(self.dec_path, [self.theta, self.level_10 & self.theta]) self.fixup_sigma_to_be_aux(cubes) - self.assertCML(cubes, "theta_and_theta_10") + self.assertConstraintCML(cubes, "theta_and_theta_10") @tests.skip_data diff --git a/lib/iris/tests/test_plot.py b/lib/iris/tests/test_plot.py index 64f128c408..50773f0d24 100644 --- a/lib/iris/tests/test_plot.py +++ b/lib/iris/tests/test_plot.py @@ -780,7 +780,7 @@ def setUp(self): self.draw_method = qplt.plot -_load_cube_once_cache = {} +_load_cube_once_cache: dict[tuple[str, str], iris.cube.Cube] = {} def load_cube_once(filename, constraint): diff --git a/lib/iris/tests/unit/analysis/maths/test__arith__meshcoords.py b/lib/iris/tests/unit/analysis/maths/test__arith__meshcoords.py index 7275bfa1d3..58af4364d4 100644 --- a/lib/iris/tests/unit/analysis/maths/test__arith__meshcoords.py +++ b/lib/iris/tests/unit/analysis/maths/test__arith__meshcoords.py @@ -44,7 +44,7 @@ class MeshLocationsMixin: # Modify the inherited data operation, to test with a mesh-cube. # Also, optionally, test with derived coordinates. - def _base_testcube(self): + def _base_testcube(self, include_derived=False): cube = super()._base_testcube(include_derived=self.use_derived_coords) cube = _convert_to_meshcube(cube) self.cube_xy_dimcoords = ["i_mesh_face"] diff --git a/lib/iris/tests/unit/common/metadata/test_CubeMetadata.py b/lib/iris/tests/unit/common/metadata/test_CubeMetadata.py index 92af65da5c..c1846fb976 100644 --- a/lib/iris/tests/unit/common/metadata/test_CubeMetadata.py +++ b/lib/iris/tests/unit/common/metadata/test_CubeMetadata.py @@ -6,6 +6,8 @@ # Import iris.tests first so that some things can be initialised before # importing anything else. 
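Reviewer note: the wholesale `assertCML` to `assertConstraintCML` rename above exists because the mixin's two-argument helper would otherwise override `IrisTest.assertCML` with an incompatible signature, which mypy rejects as a Liskov violation; the `_base_testcube(self, include_derived=False)` change fixes the same class of error by matching the parent signature instead. A minimal sketch with illustrative names:

    class BaseDemo:
        def assert_cml(self, cubes: object, path: tuple) -> None: ...

    class DerivedDemo(BaseDemo):
        # Overriding assert_cml(self, cubes, filename) here would be an
        # incompatible override, so the helper gets a distinct name.
        def assert_constraint_cml(self, cubes: object, filename: str) -> None:
            self.assert_cml(cubes, ("constrained_load", filename))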
+from typing import Any + import iris.tests as tests # isort:skip from copy import deepcopy @@ -90,7 +92,7 @@ def test_bases(self): self.assertTrue(issubclass(self.cls, BaseMetadata)) -@pytest.fixture(params=CubeMetadata._fields) +@pytest.fixture(params=CubeMetadata._fields) # type: ignore[attr-defined] def fieldname(request): """Parametrize testing over all CubeMetadata field names.""" return request.param @@ -140,7 +142,7 @@ def order_reversed(request): # N.B. the *same* results should also apply when left+right are swapped, with a suitable # adjustment to the result value. Likewise, results should be the same for either # global- or local-style attributes. -_ALL_RESULTS = { +_ALL_RESULTS: dict[str, dict[str, dict[str, Any]]] = { "equal": { "primaryAA": {"lenient": True, "strict": True}, "primaryAX": {"lenient": True, "strict": False}, @@ -302,7 +304,7 @@ def check_splitattrs_testcase( CubeMetadata( **{ field: attrs if field == "attributes" else None - for field in CubeMetadata._fields + for field in CubeMetadata._fields # type: ignore[attr-defined] } ) for attrs in input_dicts @@ -319,7 +321,7 @@ def check_splitattrs_testcase( # Adjust the result of a "reversed" operation to the 'normal' way round. # ( N.B. only "difference" results are affected by reversal. ) if isinstance(result, CubeMetadata): - result = result._replace(attributes=result.attributes[::-1]) + result = result._replace(attributes=result.attributes[::-1]) # type: ignore[attr-defined] # Extract, from the operation result, the value to be tested against "expected". result = extract_result_value(result, check_global_not_local) @@ -330,7 +332,9 @@ def check_splitattrs_testcase( expected = _ALL_RESULTS[operation_name][primary_key][which] if operation_name == "equal" and expected: # Account for the equality cases made `False` by mismatched secondary values. 
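Reviewer note: the suppressions above all carry explicit error codes (`attr-defined` here, `method-assign` back in `analysis/__init__.py`) because the new pyproject config enables `ignore-without-code`, so a bare `# type: ignore` is itself reported. A minimal sketch of the `method-assign` case, with illustrative names:

    class AggregatorDemo:
        def name(self) -> str:
            return "demo"

    # Re-binding a method on the class is always flagged by mypy, so it
    # needs a suppression that names the specific error code.
    AggregatorDemo.name = lambda self: "max_run"  # type: ignore[method-assign]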
- left, right = secondary_inputs + left, right = list( + secondary_inputs + ) # see https://github.com/python/mypy/issues/13823 secondaries_same = left == right or (check_is_lenient and "X" in (left, right)) if not secondaries_same: expected = False @@ -347,7 +351,7 @@ class MixinSplitattrsMatrixTests: """ # Define the operation name : set in each inheritor - operation_name = None + operation_name: str def test_splitattrs_cases( self, diff --git a/lib/iris/tests/unit/concatenate/__init__.py b/lib/iris/tests/unit/concatenate/__init__.py index a99b5790d7..7b8c75d6ab 100644 --- a/lib/iris/tests/unit/concatenate/__init__.py +++ b/lib/iris/tests/unit/concatenate/__init__.py @@ -60,9 +60,9 @@ class MetaDataItem: def create_metadata( dim_coord: bool = True, scalar: bool = False, - order: int = None, + order: int | None = None, circular: bool | None = False, - coord_dtype: np.dtype = None, + coord_dtype=None, lazy: bool = True, with_bounds: bool | None = False, ) -> MetaDataItem: @@ -121,7 +121,7 @@ def create_metadata( coord.metadata = iris.common.CoordMetadata.from_metadata(metadata) dims = tuple([dim for dim in range(coord.ndim)]) - kwargs = {"scalar": scalar} + kwargs: dict[str, Any] = {"scalar": scalar} if dim_coord: kwargs["circular"] = circular diff --git a/lib/iris/tests/unit/concatenate/test__CoordSignature.py b/lib/iris/tests/unit/concatenate/test__CoordSignature.py index 0d91b1883b..ebce624bae 100644 --- a/lib/iris/tests/unit/concatenate/test__CoordSignature.py +++ b/lib/iris/tests/unit/concatenate/test__CoordSignature.py @@ -33,18 +33,18 @@ class MockCubeSignature: cell_measures_and_dims: bool | None = None ancillary_variables_and_dims: bool | None = None derived_coords_and_dims: bool | None = None - dim_coords: list[DimCoord, ...] = field(default_factory=list) + dim_coords: list[DimCoord] = field(default_factory=list) dim_mapping: bool | None = None - dim_extents: list[_Extent, ...] = field(default_factory=list) - dim_order: list[int, ...] = field(default_factory=list) - dim_metadata: list[_CoordMetaData, ...] = field(default_factory=list) + dim_extents: list[_Extent] = field(default_factory=list) + dim_order: list[int] = field(default_factory=list) + dim_metadata: list[_CoordMetaData] = field(default_factory=list) @pytest.mark.parametrize("order", [_DECREASING, _INCREASING]) @pytest.mark.parametrize("coord_dtype", [np.int32, np.float32]) @pytest.mark.parametrize("lazy", [False, True]) @pytest.mark.parametrize("with_bounds", [False, True]) -def test_dim(order: int, coord_dtype: np.dtype, lazy: bool, with_bounds: bool) -> None: +def test_dim(order: int, coord_dtype, lazy: bool, with_bounds: bool) -> None: """Test extent calculation of vector dimension coordinates.""" metadata = create_metadata( dim_coord=True, @@ -54,6 +54,7 @@ def test_dim(order: int, coord_dtype: np.dtype, lazy: bool, with_bounds: bool) - lazy=lazy, with_bounds=with_bounds, ) + assert isinstance(metadata.coord, DimCoord) # Type hint for linters. 
dim_metadata = [_CoordMetaData(metadata.coord, metadata.dims)] cube_signature = MockCubeSignature( dim_coords=[metadata.coord], dim_metadata=dim_metadata @@ -86,7 +87,7 @@ def test_dim(order: int, coord_dtype: np.dtype, lazy: bool, with_bounds: bool) - @pytest.mark.parametrize("coord_dtype", [np.int32, np.float32]) @pytest.mark.parametrize("lazy", [False, True]) @pytest.mark.parametrize("with_bounds", [False, True]) -def test_dim__scalar(coord_dtype: np.dtype, lazy: bool, with_bounds: bool) -> None: +def test_dim__scalar(coord_dtype, lazy: bool, with_bounds: bool) -> None: """Test extent calculation of scalar dimension coordinates.""" metadata = create_metadata( dim_coord=True, @@ -96,6 +97,7 @@ def test_dim__scalar(coord_dtype: np.dtype, lazy: bool, with_bounds: bool) -> No lazy=lazy, with_bounds=with_bounds, ) + assert isinstance(metadata.coord, DimCoord) # Hint for mypy. dim_metadata = [_CoordMetaData(metadata.coord, metadata.dims)] cube_signature = MockCubeSignature( dim_coords=[metadata.coord], dim_metadata=dim_metadata diff --git a/lib/iris/tests/unit/cube/test_Cube__aggregated_by.py b/lib/iris/tests/unit/cube/test_Cube__aggregated_by.py index 64c99ebd4b..878183139a 100644 --- a/lib/iris/tests/unit/cube/test_Cube__aggregated_by.py +++ b/lib/iris/tests/unit/cube/test_Cube__aggregated_by.py @@ -581,11 +581,14 @@ def get_result( if second_categorised: categorised_coord2 = AuxCoord(np.tile([0, 1, 2, 3, 4], 4), long_name="cat2") - categorised_coords = [categorised_coord1, categorised_coord2] + categorised_coords: AuxCoord | list[AuxCoord] = [ + categorised_coord1, + categorised_coord2, + ] else: categorised_coords = categorised_coord1 - aux_coords_and_dims = [ + aux_coords_and_dims: list[tuple[AuxCoord, int | tuple[int, ...]]] = [ (categorised_coord1, axes[0]), ] diff --git a/lib/iris/tests/unit/experimental/geovista/test_cube_to_polydata.py b/lib/iris/tests/unit/experimental/geovista/test_cube_to_polydata.py index 1394465fb4..6b40c71ff4 100644 --- a/lib/iris/tests/unit/experimental/geovista/test_cube_to_polydata.py +++ b/lib/iris/tests/unit/experimental/geovista/test_cube_to_polydata.py @@ -43,7 +43,7 @@ def default_cs(): class ParentClass: - MOCKED_OPERATION = NotImplemented + MOCKED_OPERATION: str @pytest.fixture() def expected(self): diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/__init__.py b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/__init__.py index 9d1c1d742a..f768ae3587 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/__init__.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/__init__.py @@ -53,7 +53,7 @@ class Mixin__nc_load_actions: """ # "global" test setting : whether to output various debug info - debug = False + debug_info = False @classmethod def setUpClass(cls): @@ -85,7 +85,7 @@ def load_cube_from_cdl(self, cdl_string, cdl_path, nc_path): # If debug enabled, switch on the activation summary debug output. # Use 'patch' so it is restored after the test. 
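Reviewer note: the `assert isinstance(metadata.coord, DimCoord)` lines above (and the earlier one in `benchmarks/cube.py`) replace comment-style "type hints for linters" with real runtime asserts, which mypy uses to narrow a union-typed value. A minimal self-contained sketch:

    def shout(value: int | str) -> str:
        # The assert narrows `value` from `int | str` to `str`, so the
        # .upper() call type-checks without a cast.
        assert isinstance(value, str)
        return value.upper()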
- self.patch("iris.fileformats.netcdf.loader.DEBUG", self.debug) + self.patch("iris.fileformats.netcdf.loader.DEBUG", self.debug_info) with warnings.catch_warnings(): warnings.filterwarnings( @@ -125,7 +125,7 @@ def run_testcase(self, warning_regex=None, **testcase_kwargs): nc_path = cdl_path.replace(".cdl", ".nc") cdl_string = self._make_testcase_cdl(**testcase_kwargs) - if self.debug: + if self.debug_info: print("CDL file content:") print(cdl_string) print("------\n") @@ -137,7 +137,7 @@ def run_testcase(self, warning_regex=None, **testcase_kwargs): with context: cube = self.load_cube_from_cdl(cdl_string, cdl_path, nc_path) - if self.debug: + if self.debug_info: print("\nCube:") print(cube) print("") diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__latlon_dimcoords.py b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__latlon_dimcoords.py index 499088a802..fcaa67c308 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__latlon_dimcoords.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__latlon_dimcoords.py @@ -10,6 +10,8 @@ """ +from typing import Literal + import iris.tests as tests # isort: skip from iris.coord_systems import GeogCS, RotatedGeogCS @@ -21,7 +23,7 @@ class Mixin_latlon_dimcoords(Mixin__nc_load_actions): # Control to test either longitude or latitude coords. # Set by inheritor classes, which are actual TestCases. - lat_1_or_lon_0 = None + lat_1_or_lon_0: Literal[0, 1] def setUp(self): super().setUp() @@ -130,7 +132,7 @@ def check_result( # It should also be a dim-coord self.assertEqual(1, len(cube.coords(dim_coords=True))) (coord,) = coords - if self.debug: + if self.debug_info: print("") print("DEBUG : result coord =", coord) print("") diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__time_coords.py b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__time_coords.py index c19dffd6e2..50e992dd50 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__time_coords.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__time_coords.py @@ -216,7 +216,7 @@ def check_result(self, cube, time_is="dim", period_is="missing"): class Mixin__singlecoord__tests(Mixin__timecoords__common): # Coordinate tests to be run for both 'time' and 'period' coordinate vars. # Set (in inheritors) to select time/period testing. - which = None + which: str def run_testcase(self, coord_dim_name=None, **opts): """Specialise 'run_testcase' for single-coord 'time' or 'period' testing.""" diff --git a/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver.py b/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver.py index f067993bed..1c57323301 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver.py +++ b/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver.py @@ -6,6 +6,8 @@ # Import iris.tests first so that some things can be initialised before # importing anything else. +from types import ModuleType + import iris.tests as tests # isort:skip import collections @@ -43,7 +45,7 @@ class Test_write(tests.IrisTest): # ------------------------------------------------------------------------- # Attribute is substituted in test_Saver__lazy. 
- array_lib = np + array_lib: ModuleType = np def _transverse_mercator_cube(self, ellipsoid=None): data = self.array_lib.arange(12).reshape(3, 4) @@ -397,7 +399,7 @@ class Test_write__valid_x_cube_attributes(tests.IrisTest): """Testing valid_range, valid_min and valid_max attributes.""" # Attribute is substituted in test_Saver__lazy. - array_lib = np + array_lib: ModuleType = np def test_valid_range_saved(self): cube = tests.stock.lat_lon_cube() @@ -441,7 +443,7 @@ class Test_write__valid_x_coord_attributes(tests.IrisTest): """Testing valid_range, valid_min and valid_max attributes.""" # Attribute is substituted in test_Saver__lazy. - array_lib = np + array_lib: ModuleType = np def test_valid_range_saved(self): cube = tests.stock.lat_lon_cube() @@ -483,7 +485,7 @@ def test_valid_max_saved(self): class Test_write_fill_value(tests.IrisTest): # Attribute is substituted in test_Saver__lazy. - array_lib = np + array_lib: ModuleType = np def _make_cube(self, dtype, masked_value=None, masked_index=None): data = self.array_lib.arange(12, dtype=dtype).reshape(3, 4) @@ -571,7 +573,7 @@ def test_no_hyphen(self): class _Common__check_attribute_compliance: # Attribute is substituted in test_Saver__lazy. - array_lib = np + array_lib: ModuleType = np def setUp(self): self.container = mock.Mock(name="container", attributes={}) diff --git a/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__lazy.py b/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__lazy.py index c22b24eedc..5b04b3b042 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__lazy.py +++ b/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__lazy.py @@ -6,6 +6,8 @@ # Import iris.tests first so that some things can be initialised before # importing anything else. +from types import ModuleType + import iris.tests as tests # isort:skip from dask import array as da @@ -17,7 +19,7 @@ class LazyMixin(tests.IrisTest): - array_lib = da + array_lib: ModuleType = da def result_path(self, basename=None, ext=""): # Precisely mirroring the tests in test_Saver, so use those CDL's. diff --git a/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__lazy_stream_data.py b/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__lazy_stream_data.py index e02f6b16c8..7c884e4c22 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__lazy_stream_data.py +++ b/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__lazy_stream_data.py @@ -10,6 +10,7 @@ """ +from collections.abc import Iterator from unittest import mock import dask.array as da @@ -42,13 +43,13 @@ def saver_patch(): # should make ***no difference to any of these tests***. @staticmethod @pytest.fixture(params=[False, True], ids=["nocompute", "compute"]) - def compute(request) -> bool: + def compute(request) -> Iterator[bool]: yield request.param # A fixture to parametrise tests over real and lazy-type data. @staticmethod @pytest.fixture(params=["realdata", "lazydata", "emulateddata"]) - def data_form(request) -> bool: + def data_form(request) -> Iterator[bool]: yield request.param @staticmethod diff --git a/lib/iris/tests/unit/merge/test_ProtoCube.py b/lib/iris/tests/unit/merge/test_ProtoCube.py index 6174e6a9d8..43fc2478c8 100644 --- a/lib/iris/tests/unit/merge/test_ProtoCube.py +++ b/lib/iris/tests/unit/merge/test_ProtoCube.py @@ -239,7 +239,7 @@ class _MergeTest: # A mixin test class for common test methods implementation. 
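Reviewer note: the repeated `array_lib: ModuleType = np` annotations above let `test_Saver__lazy` substitute `dask.array` on a subclass without an incompatible-assignment error, since any module satisfies `ModuleType`. A minimal sketch, using `numpy.ma` as the second module so it runs without dask installed:

    from types import ModuleType

    import numpy as np
    import numpy.ma as ma

    class SaverTestDemo:
        array_lib: ModuleType = np

    class MaskedVariantDemo(SaverTestDemo):
        array_lib = ma  # any module is acceptable under ModuleType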
# used by check routine: inheritors must implement it - _mergetest_type = NotImplementedError + _mergetest_type: str def check_merge_fails_with_message(self): proto_cube = iris._merge.ProtoCube(self.cube1) @@ -382,7 +382,7 @@ class _MergeTest_coordprops(_MergeTest): # A mixin test class for common coordinate properties tests. # This must be implemented by inheritors. - _mergetest_type = NotImplementedError + _mergetest_type: str def test_nochange(self): # This should simply succeed. @@ -444,8 +444,8 @@ def setUp(self): class _MergeTest_coordprops_vect(_MergeTest_coordprops): # A derived mixin test class. # Adds extra props test for aux+dim coords (test points, bounds + dims) - _mergetest_type = NotImplementedError - _coord_typename = NotImplementedError + _mergetest_type: str + _coord_typename: str def test_points(self): self.coord_to_change.points = self.coord_to_change.points + 1.0 diff --git a/lib/iris/tests/unit/plot/test_pcolormesh.py b/lib/iris/tests/unit/plot/test_pcolormesh.py index a5525770f2..dba3cce5c0 100644 --- a/lib/iris/tests/unit/plot/test_pcolormesh.py +++ b/lib/iris/tests/unit/plot/test_pcolormesh.py @@ -6,6 +6,8 @@ # Import iris.tests first so that some things can be initialised before # importing anything else. +from typing import Any + import iris.tests as tests # isort:skip from iris.tests.unit.plot import TestGraphicStringCoord @@ -39,7 +41,7 @@ def blockplot_func(self): @tests.skip_plot class Test2dContigTol(tests.IrisTest, Mixin2dCoordsContigTol): # Extra call kwargs expected -- unlike 'pcolor', there are none. - additional_kwargs = {} + additional_kwargs: dict[str, Any] = {} def blockplot_func(self): return PLOT_FUNCTION_TO_TEST diff --git a/pyproject.toml b/pyproject.toml index 82f7017f59..069bbd811f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -162,6 +162,17 @@ ignore = [ "lib/iris/std_names.py", ] +[tool.mypy] +# See https://mypy.readthedocs.io/en/stable/config_file.html +ignore_missing_imports = true +warn_unused_configs = true +warn_unreachable = true +enable_error_code = ["ignore-without-code", "redundant-expr", "truthy-bool"] +exclude = [ + 'noxfile\.py', + 'docs/src/conf\.py' +] + [tool.numpydoc_validation] checks = [ "all", # Enable all numpydoc validation rules, apart from the following: From 95b8256ab8adea447bd328c0ac38570cc669dab4 Mon Sep 17 00:00:00 2001 From: Bouwe Andela Date: Thu, 16 May 2024 16:15:46 +0200 Subject: [PATCH 2/9] Add whatsnew entry --- docs/src/whatsnew/latest.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index 96ed6870e6..f673cb06af 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -105,6 +105,7 @@ This document explains the changes made to Iris for this release #. `@trexfeathers`_ made a Nox `benchmarks` session as the recommended entry point for running benchmarks. (:pull:`5951`) +#. `@bouweandela`_ enabled mypy checks for type hints. (:pull:`5956`) .. comment Whatsnew author names (@github name) in alphabetical order. 
Note that, From af76a1e73100c78cbadc562751b7630ea13aac09 Mon Sep 17 00:00:00 2001 From: Bouwe Andela Date: Wed, 22 May 2024 09:01:37 +0200 Subject: [PATCH 3/9] Add class variable annotation --- lib/iris/coord_systems.py | 3 ++- lib/iris/fileformats/cf.py | 3 ++- .../tests/unit/common/metadata/test_CubeMetadata.py | 4 ++-- .../experimental/geovista/test_cube_to_polydata.py | 3 ++- .../nc_load_rules/actions/test__time_coords.py | 4 +++- lib/iris/tests/unit/merge/test_ProtoCube.py | 10 ++++++---- 6 files changed, 17 insertions(+), 10 deletions(-) diff --git a/lib/iris/coord_systems.py b/lib/iris/coord_systems.py index d96217bd92..4da46ae249 100644 --- a/lib/iris/coord_systems.py +++ b/lib/iris/coord_systems.py @@ -7,6 +7,7 @@ from abc import ABCMeta, abstractmethod from functools import cached_property import re +from typing import ClassVar import warnings import cartopy.crs as ccrs @@ -48,7 +49,7 @@ def _float_or_None(arg): class CoordSystem(metaclass=ABCMeta): """Abstract base class for coordinate systems.""" - grid_mapping_name: str | None = None + grid_mapping_name: ClassVar[str | None] = None def __eq__(self, other): """Override equality. diff --git a/lib/iris/fileformats/cf.py b/lib/iris/fileformats/cf.py index 1017e19315..3247aa1960 100644 --- a/lib/iris/fileformats/cf.py +++ b/lib/iris/fileformats/cf.py @@ -18,6 +18,7 @@ from collections.abc import Iterable, MutableMapping import os import re +from typing import ClassVar import warnings import numpy as np @@ -71,7 +72,7 @@ class CFVariable(metaclass=ABCMeta): #: Name of the netCDF variable attribute that identifies this #: CF-netCDF variable. - cf_identity: str | None = None + cf_identity: ClassVar[str | None] = None def __init__(self, name, data): # Accessing the list of netCDF attributes is surprisingly slow. diff --git a/lib/iris/tests/unit/common/metadata/test_CubeMetadata.py b/lib/iris/tests/unit/common/metadata/test_CubeMetadata.py index c1846fb976..7d51cbfb37 100644 --- a/lib/iris/tests/unit/common/metadata/test_CubeMetadata.py +++ b/lib/iris/tests/unit/common/metadata/test_CubeMetadata.py @@ -6,7 +6,7 @@ # Import iris.tests first so that some things can be initialised before # importing anything else. -from typing import Any +from typing import Any, ClassVar import iris.tests as tests # isort:skip @@ -351,7 +351,7 @@ class MixinSplitattrsMatrixTests: """ # Define the operation name : set in each inheritor - operation_name: str + operation_name: ClassVar[str] def test_splitattrs_cases( self, diff --git a/lib/iris/tests/unit/experimental/geovista/test_cube_to_polydata.py b/lib/iris/tests/unit/experimental/geovista/test_cube_to_polydata.py index 6b40c71ff4..bf3132cb74 100644 --- a/lib/iris/tests/unit/experimental/geovista/test_cube_to_polydata.py +++ b/lib/iris/tests/unit/experimental/geovista/test_cube_to_polydata.py @@ -4,6 +4,7 @@ # See LICENSE in the root of the repository for full licensing details. 
"""Unit tests for the `iris.experimental.geovista.cube_to_polydata` function.""" +from typing import ClassVar from unittest.mock import Mock from geovista import Transform @@ -43,7 +44,7 @@ def default_cs(): class ParentClass: - MOCKED_OPERATION: str + MOCKED_OPERATION: ClassVar[str] @pytest.fixture() def expected(self): diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__time_coords.py b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__time_coords.py index 50e992dd50..4e58c3b1d3 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__time_coords.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__time_coords.py @@ -9,6 +9,8 @@ """ +from typing import ClassVar + import iris.tests as tests # isort: skip from iris.coords import AuxCoord, DimCoord @@ -216,7 +218,7 @@ def check_result(self, cube, time_is="dim", period_is="missing"): class Mixin__singlecoord__tests(Mixin__timecoords__common): # Coordinate tests to be run for both 'time' and 'period' coordinate vars. # Set (in inheritors) to select time/period testing. - which: str + which: ClassVar[str] def run_testcase(self, coord_dim_name=None, **opts): """Specialise 'run_testcase' for single-coord 'time' or 'period' testing.""" diff --git a/lib/iris/tests/unit/merge/test_ProtoCube.py b/lib/iris/tests/unit/merge/test_ProtoCube.py index 43fc2478c8..60726f99e9 100644 --- a/lib/iris/tests/unit/merge/test_ProtoCube.py +++ b/lib/iris/tests/unit/merge/test_ProtoCube.py @@ -6,6 +6,8 @@ # Import iris.tests first so that some things can be initialised before # importing anything else. +from typing import ClassVar + import iris.tests as tests # isort:skip from abc import ABCMeta, abstractmethod @@ -239,7 +241,7 @@ class _MergeTest: # A mixin test class for common test methods implementation. # used by check routine: inheritors must implement it - _mergetest_type: str + _mergetest_type: ClassVar[str] def check_merge_fails_with_message(self): proto_cube = iris._merge.ProtoCube(self.cube1) @@ -382,7 +384,7 @@ class _MergeTest_coordprops(_MergeTest): # A mixin test class for common coordinate properties tests. # This must be implemented by inheritors. - _mergetest_type: str + _mergetest_type: ClassVar[str] def test_nochange(self): # This should simply succeed. @@ -444,8 +446,8 @@ def setUp(self): class _MergeTest_coordprops_vect(_MergeTest_coordprops): # A derived mixin test class. # Adds extra props test for aux+dim coords (test points, bounds + dims) - _mergetest_type: str - _coord_typename: str + _mergetest_type: ClassVar[str] + _coord_typename: ClassVar[str] def test_points(self): self.coord_to_change.points = self.coord_to_change.points + 1.0 From e59e7213a2babdbcace92b16cab4b2b6aa8e93eb Mon Sep 17 00:00:00 2001 From: Bouwe Andela Date: Wed, 22 May 2024 09:23:48 +0200 Subject: [PATCH 4/9] Update line numbers in doctests --- docs/src/further_topics/filtering_warnings.rst | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/docs/src/further_topics/filtering_warnings.rst b/docs/src/further_topics/filtering_warnings.rst index 5175475922..ef8701f951 100644 --- a/docs/src/further_topics/filtering_warnings.rst +++ b/docs/src/further_topics/filtering_warnings.rst @@ -47,9 +47,9 @@ Warnings: >>> my_operation() ... - iris/coord_systems.py:444: IrisUserWarning: Setting inverse_flattening does not affect other properties of the GeogCS object. To change other properties set them explicitly or create a new GeogCS instance. 
+ iris/coord_systems.py:445: IrisUserWarning: Setting inverse_flattening does not affect other properties of the GeogCS object. To change other properties set them explicitly or create a new GeogCS instance. warnings.warn(wmsg, category=iris.warnings.IrisUserWarning) - iris/coord_systems.py:770: IrisDefaultingWarning: Discarding false_easting and false_northing that are not used by Cartopy. + iris/coord_systems.py:771: IrisDefaultingWarning: Discarding false_easting and false_northing that are not used by Cartopy. warnings.warn( Warnings can be suppressed using the Python warnings filter with the ``ignore`` @@ -110,7 +110,7 @@ You can target specific Warning messages, e.g. ... warnings.filterwarnings("ignore", message="Discarding false_easting") ... my_operation() ... - iris/coord_systems.py:444: IrisUserWarning: Setting inverse_flattening does not affect other properties of the GeogCS object. To change other properties set them explicitly or create a new GeogCS instance. + iris/coord_systems.py:445: IrisUserWarning: Setting inverse_flattening does not affect other properties of the GeogCS object. To change other properties set them explicitly or create a new GeogCS instance. warnings.warn(wmsg, category=iris.warnings.IrisUserWarning) :: @@ -125,16 +125,16 @@ Or you can target Warnings raised by specific lines of specific modules, e.g. .. doctest:: filtering_warnings >>> with warnings.catch_warnings(): - ... warnings.filterwarnings("ignore", module="iris.coord_systems", lineno=444) + ... warnings.filterwarnings("ignore", module="iris.coord_systems", lineno=445) ... my_operation() ... - iris/coord_systems.py:770: IrisDefaultingWarning: Discarding false_easting and false_northing that are not used by Cartopy. + iris/coord_systems.py:771: IrisDefaultingWarning: Discarding false_easting and false_northing that are not used by Cartopy. warnings.warn( :: - python -W ignore:::iris.coord_systems:444 - export PYTHONWARNINGS=ignore:::iris.coord_systems:444 + python -W ignore:::iris.coord_systems:445 + export PYTHONWARNINGS=ignore:::iris.coord_systems:445 Warnings from a Common Source ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -188,7 +188,7 @@ module during execution: ... ) ... my_operation() ... - iris/coord_systems.py:444: IrisUserWarning: Setting inverse_flattening does not affect other properties of the GeogCS object. To change other properties set them explicitly or create a new GeogCS instance. + iris/coord_systems.py:445: IrisUserWarning: Setting inverse_flattening does not affect other properties of the GeogCS object. To change other properties set them explicitly or create a new GeogCS instance. 
warnings.warn(wmsg, category=iris.warnings.IrisUserWarning)
 
 ----
 

From 5a286e8a636070d6060682ec404fd49e4f36ccc2 Mon Sep 17 00:00:00 2001
From: Bouwe Andela
Date: Wed, 26 Jun 2024 12:42:30 +0200
Subject: [PATCH 5/9] Implement suggestions from code review

---
 benchmarks/asv_delegated_conda.py     |  3 ++-
 lib/iris/analysis/maths.py            |  4 ++--
 lib/iris/experimental/ugrid/utils.py  |  5 +++--
 lib/iris/fileformats/netcdf/loader.py |  8 ++------
 lib/iris/fileformats/netcdf/saver.py  |  2 +-
 lib/iris/fileformats/rules.py         | 10 ++++++----
 pyproject.toml                        |  2 +-
 7 files changed, 17 insertions(+), 17 deletions(-)

diff --git a/benchmarks/asv_delegated_conda.py b/benchmarks/asv_delegated_conda.py
index 8cb084da1d..f811f57427 100644
--- a/benchmarks/asv_delegated_conda.py
+++ b/benchmarks/asv_delegated_conda.py
@@ -100,7 +100,8 @@ def name(self):
     def _update_info(self) -> None:
         """Make sure class properties reflect the actual environment being used."""
         # Follow symlink if it has been created.
-        actual_path = Path(self._path).resolve()  # type: ignore[has-type]
+        self._path: str
+        actual_path = Path(self._path).resolve()
         self._path = str(actual_path)
 
         # Get custom environment's Python version if it exists yet.
diff --git a/lib/iris/analysis/maths.py b/lib/iris/analysis/maths.py
index 1ac29dd2ed..bd20b26019 100644
--- a/lib/iris/analysis/maths.py
+++ b/lib/iris/analysis/maths.py
@@ -558,12 +558,12 @@ def exponentiate(cube, exponent, in_place=False):
         )
     if cube.has_lazy_data():
 
-        def power(data):
+        def power(data, out=None):
             return operator.pow(data, exponent)
 
     else:
 
-        def power(data, out=None):  # type: ignore[misc]
+        def power(data, out=None):
             return np.power(data, exponent, out)
 
     return _math_op_common(
diff --git a/lib/iris/experimental/ugrid/utils.py b/lib/iris/experimental/ugrid/utils.py
index e86b003dc2..0074619bf2 100644
--- a/lib/iris/experimental/ugrid/utils.py
+++ b/lib/iris/experimental/ugrid/utils.py
@@ -11,6 +11,7 @@
 import dask.array as da
 import numpy as np
 
+from iris.common.metadata import CoordMetadata
 from iris.cube import Cube
 
 
@@ -93,7 +94,7 @@ def recombine_submeshes(
 
     result_metadata = None
     result_dtype = None
-    indexcoord_metadata = None
+    indexcoord_metadata: CoordMetadata | None = None
     for i_sub, cube in enumerate(submesh_cubes):
         sub_str = f'Submesh cube #{i_sub + 1}/{len(submesh_cubes)}, "{cube.name()}"'
 
@@ -197,7 +198,7 @@ def recombine_submeshes(
             if indexcoord_metadata is None:
                 # Store first occurrence (from first region-cube)
                 indexcoord_metadata = sub_metadata
-            elif sub_metadata != indexcoord_metadata:  # type: ignore[unreachable]
+            elif sub_metadata != indexcoord_metadata:
# Compare subsequent occurrences (from other region-cubes) err = ( diff --git a/lib/iris/fileformats/netcdf/loader.py b/lib/iris/fileformats/netcdf/loader.py index aa973aaecc..9378d7ae1f 100644 --- a/lib/iris/fileformats/netcdf/loader.py +++ b/lib/iris/fileformats/netcdf/loader.py @@ -11,12 +11,11 @@ """ -from collections.abc import Iterable, Mapping +from collections.abc import Iterable, Iterator, Mapping from contextlib import contextmanager from copy import deepcopy from enum import Enum, auto import threading -from typing import Iterator import warnings import numpy as np @@ -765,10 +764,7 @@ def set( raise ValueError(msg) dim_chunks = self.var_dim_chunksizes.setdefault(var_name, {}) for dim_name, chunksize in dimension_chunksizes.items(): - if not ( - isinstance(dim_name, str) # type: ignore[redundant-expr] - and isinstance(chunksize, int) - ): + if not (isinstance(dim_name, str) and isinstance(chunksize, int)): msg = ( "'dimension_chunksizes' kwargs should be a dict " f"of `str: int` pairs, not {dimension_chunksizes!r}." diff --git a/lib/iris/fileformats/netcdf/saver.py b/lib/iris/fileformats/netcdf/saver.py index 8755978962..5afe8f2208 100644 --- a/lib/iris/fileformats/netcdf/saver.py +++ b/lib/iris/fileformats/netcdf/saver.py @@ -2291,7 +2291,7 @@ def _increment_name(self, varname): def _lazy_stream_data( self, - data: np.ndarray | da.Array, + data: np.typing.ArrayLike, cf_var: CFVariable, ) -> None: if hasattr(data, "shape") and data.shape == (1,) + cf_var.shape: diff --git a/lib/iris/fileformats/rules.py b/lib/iris/fileformats/rules.py index c467c76f4b..8299021fb5 100644 --- a/lib/iris/fileformats/rules.py +++ b/lib/iris/fileformats/rules.py @@ -251,10 +251,12 @@ def _ensure_aligned(regrid_cache, src_cube, target_cube): return result_cube -_loader_attrs = ("field_generator", "field_generator_kwargs", "converter") - - -class Loader(collections.namedtuple("Loader", _loader_attrs)): # type: ignore[misc] +class Loader( + collections.namedtuple( + "Loader", + ("field_generator", "field_generator_kwargs", "converter"), + ) +): def __new__(cls, field_generator, field_generator_kwargs, converter): """Create a definition of a field-based Cube loader. diff --git a/pyproject.toml b/pyproject.toml index 60254113aa..c55f4597d5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -167,7 +167,7 @@ ignore = [ ignore_missing_imports = true warn_unused_configs = true warn_unreachable = true -enable_error_code = ["ignore-without-code", "redundant-expr", "truthy-bool"] +enable_error_code = ["ignore-without-code", "truthy-bool"] exclude = [ 'noxfile\.py', 'docs/src/conf\.py' From 931bb2503ffb7ea222fe8b56030ba81b5ed27ba6 Mon Sep 17 00:00:00 2001 From: Bouwe Andela Date: Wed, 26 Jun 2024 12:43:10 +0200 Subject: [PATCH 6/9] Update docs/src/whatsnew/latest.rst Co-authored-by: Martin Yeo <40734014+trexfeathers@users.noreply.github.com> --- docs/src/whatsnew/latest.rst | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index 38d84744b2..3245b2525e 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -126,7 +126,9 @@ This document explains the changes made to Iris for this release #. `@rcomer`_ made some :meth:`~iris.cube.Cube.slices_over` tests go faster (:pull:`5973`) -#. `@bouweandela`_ enabled mypy checks for type hints. (:pull:`5956`) +#. `@bouweandela`_ enabled mypy checks for type hints. 
+ The entire team would like to thank Bouwe for putting in the hard + work on an unglamorous but highly valuable contribution. (:pull:`5956`) .. comment Whatsnew author names (@github name) in alphabetical order. Note that, From ac4d44ccf2738e741dce97c5b9bcb2110576777c Mon Sep 17 00:00:00 2001 From: Bouwe Andela Date: Wed, 26 Jun 2024 13:05:50 +0200 Subject: [PATCH 7/9] Workaround for python/mypy#1465 Co-authored-by: Martin Yeo <40734014+trexfeathers@users.noreply.github.com> --- lib/iris/coords.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/lib/iris/coords.py b/lib/iris/coords.py index 1acc7ae7e4..56750d0262 100644 --- a/lib/iris/coords.py +++ b/lib/iris/coords.py @@ -2704,7 +2704,12 @@ def _new_points_requirements(self, points): emsg = "The {!r} {} points array must be strictly monotonic." raise ValueError(emsg.format(self.name(), self.__class__.__name__)) - @Coord._values.setter # type: ignore[attr-defined] + @property + def _values(self): + # Overridden just to allow .setter override. + return super()._values + + @_values.setter def _values(self, points): # DimCoord always realises the points, to allow monotonicity checks. # Ensure it is an actual array, and also make our own copy so that we From c3e5d8d8f0288a00a94477a7129610c5d94b452a Mon Sep 17 00:00:00 2001 From: Bouwe Andela Date: Wed, 26 Jun 2024 13:09:10 +0200 Subject: [PATCH 8/9] Also apply workaround for bounds --- lib/iris/coords.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/lib/iris/coords.py b/lib/iris/coords.py index 56750d0262..e563b56498 100644 --- a/lib/iris/coords.py +++ b/lib/iris/coords.py @@ -2801,7 +2801,12 @@ def _new_bounds_requirements(self, bounds): return bounds - @Coord.bounds.setter # type: ignore[attr-defined] + @property + def bounds(self): + # Overridden just to allow .setter override. + return super().bounds + + @bounds.setter def bounds(self, bounds): if bounds is not None: # Ensure we have a realised array of new bounds values. From 74d32b9dd7c340239b3a390d9d7c616d02e02287 Mon Sep 17 00:00:00 2001 From: Bouwe Andela Date: Wed, 26 Jun 2024 13:11:36 +0200 Subject: [PATCH 9/9] Add type hint --- benchmarks/bm_runner.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/benchmarks/bm_runner.py b/benchmarks/bm_runner.py index ef1dc60993..e94b15d788 100644 --- a/benchmarks/bm_runner.py +++ b/benchmarks/bm_runner.py @@ -313,7 +313,7 @@ class _SubParserGenerator(ABC): description: str = NotImplemented epilog: str = NotImplemented - def __init__(self, subparsers) -> None: + def __init__(self, subparsers: argparse._SubParsersAction[ArgumentParser]) -> None: self.subparser: ArgumentParser = subparsers.add_parser( self.name, description=self.description,
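
A note on the fixture annotations in patch 1: a pytest fixture that yields is a
generator function, so its declared return type is an iterator over the yielded
values rather than the value itself, which is why "-> bool" became
"-> Iterator[bool]". A minimal sketch of the idiom, using a hypothetical
fixture named "compute_flag" (not part of the patches):

    from collections.abc import Iterator

    import pytest


    @pytest.fixture(params=[False, True], ids=["nocompute", "compute"])
    def compute_flag(request) -> Iterator[bool]:
        # A yield-fixture is a generator: it yields the value to the test,
        # then resumes for any teardown, so the checker sees Iterator[bool].
        yield request.param


    def test_compute_flag(compute_flag):
        assert isinstance(compute_flag, bool)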
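Patches 1 and 3 repeatedly replace placeholders such as
"_mergetest_type = NotImplementedError" with a bare ClassVar annotation. The
annotation declares, without assigning, an attribute that every concrete
inheritor must supply, and ClassVar tells the checker it is class-level state
rather than an instance attribute. A self-contained sketch with hypothetical
names (MixinChecks, which, TimeChecks):

    from typing import ClassVar


    class MixinChecks:
        # Declared but deliberately unassigned: concrete subclasses set it.
        # ClassVar also makes mypy reject per-instance assignment
        # (self.which = ...).
        which: ClassVar[str]

        def coord_name(self) -> str:
            return f"{self.which}_coord"


    class TimeChecks(MixinChecks):
        which = "time"


    print(TimeChecks().coord_name())  # prints "time_coord"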
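The "self._path: str" line added in patch 5 is the same device applied inside a
method: a bare annotation declares the attribute's type to the checker without
executing an assignment, which resolves the earlier "has-type" error where mypy
could not infer the type of "_path" before its first use. A sketch, assuming a
hypothetical Env class:

    class Env:
        def __init__(self, path: str) -> None:
            self._path = path

        def refresh(self) -> None:
            # Declaration only: tells the checker the attribute's type here;
            # this statement assigns nothing at runtime.
            self._path: str
            self._path = self._path.rstrip("/")


    env = Env("some/prefix/")
    env.refresh()
    print(env._path)  # prints "some/prefix"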
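Finally, the coords.py changes in patches 7 and 8 work around python/mypy#1465:
mypy does not understand a subclass that overrides only a property's setter via
"@Parent.prop.setter", so the getter is re-declared in the subclass, delegating
to the parent, and the new setter is attached to that fresh property. A
standalone sketch of the pattern with hypothetical classes Base and Strict:

    class Base:
        def __init__(self) -> None:
            self._value = 0

        @property
        def value(self) -> int:
            return self._value

        @value.setter
        def value(self, new: int) -> None:
            self._value = new


    class Strict(Base):
        @property
        def value(self) -> int:
            # Re-declared only so the setter below can be overridden;
            # reads still delegate to the parent getter.
            return super().value

        @value.setter
        def value(self, new: int) -> None:
            # The subclass setter adds validation before storing.
            if new < 0:
                raise ValueError("value must be non-negative")
            self._value = new


    s = Strict()
    s.value = 3
    print(s.value)  # prints 3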