From ddcf8db780897edf7dbd71814952f288a22109aa Mon Sep 17 00:00:00 2001 From: Joseph D Hughes Date: Sat, 9 Oct 2021 21:11:10 -0500 Subject: [PATCH 01/15] refactor(imports): add function to import optional packages --- flopy/utils/__init__.py | 1 + flopy/utils/rasters.py | 130 +++++++---------------------------- flopy/utils/utl_import.py | 138 ++++++++++++++++++++++++++++++++++++++ flopy/utils/voronoi.py | 9 ++- 4 files changed, 170 insertions(+), 108 deletions(-) create mode 100644 flopy/utils/utl_import.py diff --git a/flopy/utils/__init__.py b/flopy/utils/__init__.py index 22d57688d0..5c79fa2ba6 100644 --- a/flopy/utils/__init__.py +++ b/flopy/utils/__init__.py @@ -21,6 +21,7 @@ """ from .mfreadnam import parsenamefile +from .utl_import import import_optional_dependency from .util_array import Util3d, Util2d, Transient2d, Transient3d, read1d from .util_list import MfList from .binaryfile import ( diff --git a/flopy/utils/rasters.py b/flopy/utils/rasters.py index 53f4a17857..de7dc6ab64 100644 --- a/flopy/utils/rasters.py +++ b/flopy/utils/rasters.py @@ -2,20 +2,7 @@ import threading import queue -try: - import rasterio -except ImportError: - rasterio = None - -try: - import affine -except ImportError: - affine = None - -try: - import scipy -except ImportError: - scipy = None +from .utl_import import import_optional_dependency class Raster: @@ -70,24 +57,11 @@ def __init__( driver="GTiff", rio_ds=None, ): - if rasterio is None: - msg = ( - "Raster(): error " - + 'importing rasterio - try "pip install rasterio"' - ) - raise ImportError(msg) - else: - from rasterio.crs import CRS - - if affine is None: - msg = ( - "Raster(): error " - + 'importing affine - try "pip install affine"' - ) - raise ImportError(msg) - from .geometry import point_in_polygon + self._rasterio = import_optional_dependency("rasterio") + self._affine = import_optional_dependency("affine") + self._point_in_polygon = point_in_polygon self._array = array self._bands = bands @@ -112,12 +86,12 @@ def __init__( meta["dtype"] = dtype - if isinstance(crs, CRS): + if isinstance(crs, self._rasterio.crs.CRS): pass elif isinstance(crs, int): - crs = CRS.from_epsg(crs) + crs = self._rasterio.crs.CRS.from_epsg(crs) elif isinstance(crs, str): - crs = CRS.from_string(crs) + crs = self._rasterio.crs.CRS.from_string(crs) else: TypeError("crs type not understood, provide an epsg or proj4") @@ -128,7 +102,7 @@ def __init__( meta["height"] = height meta["width"] = width - if not isinstance(transform, affine.Affine): + if not isinstance(transform, self._affine.Affine): raise TypeError("Transform must be defined by an Affine object") meta["transform"] = transform @@ -142,7 +116,7 @@ def __init__( self.__xcenters = None self.__ycenters = None - if isinstance(rio_ds, rasterio.io.DatasetReader): + if isinstance(rio_ds, self._rasterio.io.DatasetReader): self._dataset = rio_ds @property @@ -389,13 +363,8 @@ def resample_to_grid( ------- np.array """ - if scipy is None: - print( - "Raster().resample_to_grid(): error " - + 'importing scipy - try "pip install scipy"' - ) - else: - from scipy.interpolate import griddata + import_optional_dependency("scipy") + from scipy.interpolate import griddata method = method.lower() if method in ("linear", "nearest", "cubic"): @@ -597,25 +566,6 @@ def crop(self, polygon, invert=False): self.__ycenters = None else: - # crop from user supplied points using numpy - if rasterio is None: - msg = ( - "Raster().crop(): error " - + 'importing rasterio try "pip install rasterio"' - ) - raise ImportError(msg) - else: - from 
rasterio.mask import mask - - if affine is None: - msg = ( - "Raster(),crop(): error " - + 'importing affine - try "pip install affine"' - ) - raise ImportError(msg) - else: - from affine import Affine - mask = self._intersection(polygon, invert) xc = self.xcenters @@ -676,7 +626,7 @@ def crop(self, polygon, invert=False): self._meta["height"] = crp_mask.shape[0] self._meta["width"] = crp_mask.shape[1] transform = self._meta["transform"] - self._meta["transform"] = Affine( + self._meta["transform"] = self._affine.Affine( transform[0], transform[1], xmin, @@ -712,15 +662,6 @@ def _sample_rio_dataset(self, polygon, invert): tuple : (arr_dict, raster_crp_meta) """ - if rasterio is None: - msg = ( - "Raster()._sample_rio_dataset(): error " - + 'importing rasterio try "pip install rasterio"' - ) - raise ImportError(msg) - else: - from rasterio.mask import mask - from .geospatial_utils import GeoSpatialUtil if isinstance(polygon, (list, tuple, np.ndarray)): @@ -729,7 +670,7 @@ def _sample_rio_dataset(self, polygon, invert): geom = GeoSpatialUtil(polygon, shapetype="Polygon") shapes = [geom] - rstr_crp, rstr_crp_affine = mask( + rstr_crp, rstr_crp_affine = self._rasterio.mask.mask( self._dataset, shapes, crop=True, invert=invert ) @@ -837,17 +778,11 @@ def write(self, name): output raster .tif file name """ - if rasterio is None: - msg = ( - "Raster().write(): error " - + 'importing rasterio - try "pip install rasterio"' - ) - raise ImportError(msg) if not name.endswith(".tif"): name += ".tif" - with rasterio.open(name, "w", **self._meta) as foo: + with self._rasterio.open(name, "w", **self._meta) as foo: for band, arr in self.__arr_dict.items(): foo.write(arr, band) @@ -866,12 +801,7 @@ def load(raster): Raster object """ - if rasterio is None: - msg = ( - "Raster().load(): error " - + 'importing rasterio - try "pip install rasterio"' - ) - raise ImportError(msg) + rasterio = import_optional_dependency("rasterio") dataset = rasterio.open(raster) array = dataset.read() @@ -908,17 +838,13 @@ def plot(self, ax=None, contour=False, **kwargs): ax : matplotlib.pyplot.axes """ - if rasterio is None: - msg = ( - "Raster().plot(): error " - + 'importing rasterio - try "pip install rasterio"' - ) - raise ImportError(msg) - else: - from rasterio.plot import show - if self._dataset is not None: - ax = show(self._dataset, ax=ax, contour=contour, **kwargs) + ax = self._rasterio.plot.show( + self._dataset, + ax=ax, + contour=contour, + **kwargs, + ) else: d0 = len(self.__arr_dict) @@ -936,7 +862,7 @@ def plot(self, ax=None, contour=False, **kwargs): i += 1 data = np.ma.masked_where(data == self.nodatavals, data) - ax = show( + ax = self._rasterio.plot.show( data, ax=ax, contour=contour, @@ -965,20 +891,12 @@ def histogram(self, ax=None, **kwargs): ax : matplotlib.pyplot.axes """ - if rasterio is None: - msg = ( - "Raster().histogram(): error " - + 'importing rasterio - try "pip install rasterio"' - ) - raise ImportError(msg) - else: - from rasterio.plot import show_hist if "alpha" not in kwargs: kwargs["alpha"] = 0.3 if self._dataset is not None: - ax = show_hist(self._dataset, ax=ax, **kwargs) + ax = self._rasterio.plot.show_hist(self._dataset, ax=ax, **kwargs) else: d0 = len(self.__arr_dict) @@ -996,6 +914,6 @@ def histogram(self, ax=None, **kwargs): i += 1 data = np.ma.masked_where(data == self.nodatavals, data) - ax = show_hist(data, ax=ax, **kwargs) + ax = self._rasterio.plot.show_hist(data, ax=ax, **kwargs) return ax diff --git a/flopy/utils/utl_import.py b/flopy/utils/utl_import.py new file mode 100644 
index 0000000000..69da4885dc --- /dev/null +++ b/flopy/utils/utl_import.py @@ -0,0 +1,138 @@ +from __future__ import annotations + +import importlib +import sys +import types +import warnings + +# Update install.rst when updating versions! + +VERSIONS = { + # "bs4": "4.8.2", + # "bottleneck": "1.3.1", + # "fsspec": "0.7.4", + # "fastparquet": "0.4.0", + # "gcsfs": "0.6.0", + # "lxml.etree": "4.5.0", + # "matplotlib": "3.3.2", + # "numexpr": "2.7.1", + # "odfpy": "1.4.1", + # "openpyxl": "3.0.2", + # "pandas_gbq": "0.14.0", + # "pyarrow": "0.17.0", + # "pytest": "6.0", + # "pyxlsb": "1.0.6", + # "s3fs": "0.4.0", + # "scipy": "1.4.1", + # "sqlalchemy": "1.3.11", + # "tables": "3.6.1", + # "tabulate": "0.8.7", + # "xarray": "0.15.1", + # "xlrd": "2.0.1", + # "xlwt": "1.3.0", + # "xlsxwriter": "1.2.2", + # "numba": "0.50.1", +} + +# A mapping from import name to package name (on PyPI) for packages where +# these two names are different. + +INSTALL_MAPPING = { + "shapefile": "pyshp", +} + + +def get_version(module: types.ModuleType) -> str: + version = getattr(module, "__version__", None) + if version is None: + # xlrd uses a capitalized attribute name + version = getattr(module, "__VERSION__", None) + + if version is None: + raise ImportError(f"Can't determine version for {module.__name__}") + return version + + +def import_optional_dependency( + name: str, + extra: str = "", + errors: str = "raise", + min_version: str | None = None, +): + """ + Import an optional dependency. + + By default, if a dependency is missing an ImportError with a nice + message will be raised. If a dependency is present, but too old, + we raise. + + Parameters + ---------- + name : str + The module name. + extra : str + Additional text to include in the ImportError message. + errors : str {'raise', 'warn', 'ignore'} + What to do when a dependency is not found or its version is too old. + + * raise : Raise an ImportError + * warn : Only applicable when a module's version is to old. + Warns that the version is too old and returns None + * ignore: If the module is not installed, return None, otherwise, + return the module, even if the version is too old. + It's expected that users validate the version locally when + using ``errors="ignore"`` (see. ``io/html.py``) + min_version : str, default None + Specify a minimum version that is different from the global pandas + minimum version required. + Returns + ------- + maybe_module : Optional[ModuleType] + The imported module, when found and the version is correct. + None is returned when the package is not found and `errors` + is False, or when the package's version is too old and `errors` + is ``'warn'``. + """ + + assert errors in {"warn", "raise", "ignore"} + + package_name = INSTALL_MAPPING.get(name) + install_name = package_name if package_name is not None else name + + msg = ( + f"Missing optional dependency '{install_name}'. {extra} " + f"Use pip or conda to install {install_name}." 
+ ) + try: + module = importlib.import_module(name) + except ImportError: + if errors == "raise": + raise ImportError(msg) + else: + print(msg) + return None + + # Handle submodules: if we have submodule, grab parent module from sys.modules + parent = name.split(".")[0] + if parent != name: + install_name = parent + module_to_get = sys.modules[install_name] + else: + module_to_get = module + minimum_version = ( + min_version if min_version is not None else VERSIONS.get(parent) + ) + if minimum_version: + version = get_version(module_to_get) + if Version(version) < Version(minimum_version): + msg = ( + f"Pandas requires version '{minimum_version}' or newer of '{parent}' " + f"(version '{version}' currently installed)." + ) + if errors == "warn": + warnings.warn(msg, UserWarning) + return None + elif errors == "raise": + raise ImportError(msg) + + return module diff --git a/flopy/utils/voronoi.py b/flopy/utils/voronoi.py index 4740f43c8e..48376bc873 100644 --- a/flopy/utils/voronoi.py +++ b/flopy/utils/voronoi.py @@ -1,8 +1,9 @@ import numpy as np -from scipy.spatial import Voronoi from .cvfdutil import get_disv_gridprops from .geometry import point_in_polygon +from .utl_import import import_optional_dependency + def get_sorted_vertices(icell_vertices, vertices): centroid = vertices[icell_vertices].mean(axis=0) @@ -74,6 +75,10 @@ def tri2vor(tri, **kwargs): verts, iverts : ndarray, list of lists """ + extra = "Voronoi requires SciPy." + import_optional_dependency("scipy.spatial", extra=extra) + from scipy.spatial import Voronoi + # assign local variables tri_verts = tri.verts tri_iverts = tri.iverts @@ -341,7 +346,7 @@ def plot(self, ax=None, plot_title=True, **kwargs): axes that contains the voronoi model grid """ - import matplotlib.pyplot as plt + plt = import_optional_dependency("matplotlib.pyplot") if ax is None: ax = plt.subplot(1, 1, 1, aspect="equal") From ed7997c4b2952b823156d35f263c41a46170f1f2 Mon Sep 17 00:00:00 2001 From: Joseph D Hughes Date: Mon, 11 Oct 2021 14:18:14 -0500 Subject: [PATCH 02/15] refactor(imports): add function to import optional packages --- flopy/export/metadata.py | 19 +- flopy/export/netcdf.py | 42 +- flopy/export/shapefile_utils.py | 47 +-- flopy/export/utils.py | 11 +- flopy/export/vtk.py | 8 +- flopy/mf6/utils/binaryfile_utils.py | 8 +- flopy/mf6/utils/mfobservation.py | 9 +- flopy/modflow/mfsfr2.py | 25 +- flopy/plot/crosssection.py | 16 +- flopy/plot/map.py | 12 +- flopy/plot/plotutil.py | 70 +--- flopy/plot/styles.py | 11 +- flopy/utils/flopy_io.py | 19 +- flopy/utils/geometry.py | 30 +- flopy/utils/geospatial_utils.py | 32 +- flopy/utils/gridgen.py | 15 +- flopy/utils/gridintersect.py | 61 +-- flopy/utils/mflistfile.py | 8 +- flopy/utils/mtlistfile.py | 18 +- flopy/utils/observationfile.py | 10 +- flopy/utils/parse_version.py | 602 ++++++++++++++++++++++++++++ flopy/utils/rasters.py | 33 +- flopy/utils/sfroutputfile.py | 9 +- flopy/utils/triangle.py | 32 +- flopy/utils/util_list.py | 8 +- flopy/utils/utl_import.py | 4 + flopy/utils/voronoi.py | 2 +- flopy/utils/zonbud.py | 11 +- setup.py | 1 + 29 files changed, 783 insertions(+), 390 deletions(-) create mode 100644 flopy/utils/parse_version.py diff --git a/flopy/export/metadata.py b/flopy/export/metadata.py index d73646c291..36cefcf3d1 100644 --- a/flopy/export/metadata.py +++ b/flopy/export/metadata.py @@ -1,10 +1,7 @@ -from flopy.utils.flopy_io import get_url_text import numpy as np -try: - import pandas as pd -except: - pd = False +from ..utils.flopy_io import get_url_text +from ..utils import 
import_optional_dependency class acdd: @@ -194,13 +191,15 @@ def time_coverage(self): ------- """ + pd = import_optional_dependency("pandas", errors="ignore") + l = self.sb["dates"] tc = {} for t in ["start", "end"]: tc[t] = [d.get("dateString") for d in l if t in d["type"].lower()][ 0 ] - if not np.all(self.model_time.steady_state) and pd: + if not np.all(self.model_time.steady_state) and pd is not None: # replace with times from model reference tc["start"] = self.model_time.start_datetime strt = pd.Timestamp(self.model_time.start_datetime) @@ -260,7 +259,6 @@ def get_sciencebase_metadata(self, id): url = urlbase.format(id) import json - from flopy.utils.flopy_io import get_url_text msg = "Need an internet connection to get metadata from ScienceBase." text = get_url_text(url, error_msg=msg) @@ -283,11 +281,8 @@ def get_sciencebase_xml_metadata(self): metadata : dict Dictionary of metadata """ - try: - # use defusedxml to removed XML security vulnerabilities - import defusedxml.ElementTree as ET - except ImportError: - raise ImportError("DefusedXML must be installed to query metadata") + # use defusedxml to removed XML security vulnerabilities + ET = import_optional_dependency("defusedxml.ElementTree") url = self.xmlfile msg = "Need an internet connection to get metadata from ScienceBase." diff --git a/flopy/export/netcdf.py b/flopy/export/netcdf.py index 7202995936..11d40112f0 100644 --- a/flopy/export/netcdf.py +++ b/flopy/export/netcdf.py @@ -7,7 +7,8 @@ from datetime import datetime import time from .metadata import acdd -import flopy + +from ..utils import import_optional_dependency # globals FILLVALUE = -99999.9 @@ -188,15 +189,9 @@ def __init__( ) self.shape = self.model_grid.shape - try: - import dateutil.parser - except ImportError: - raise ImportError( - "python-dateutil is not installed\n" - "try pip install python-dateutil" - ) + parser = import_optional_dependency("dateutil.parser") - dt = dateutil.parser.parse(self.model_time.start_datetime) + dt = parser.parse(self.model_time.start_datetime) self.start_datetime = dt.strftime("%Y-%m-%dT%H:%M:%SZ") self.logger.log(f"start datetime:{self.start_datetime}") @@ -488,11 +483,8 @@ def difference( assert ( self.nc is not None ), "can't call difference() if nc hasn't been populated" - try: - import netCDF4 - except ImportError as e: - self.logger.warn("error importing netCDF module") - raise ImportError("NetCdf error importing netCDF4 module") from e + + netCDF4 = import_optional_dependency("netCFD4") if isinstance(other, str): assert os.path.exists(other), f"filename 'other' not found:{other}" @@ -683,10 +675,7 @@ def initialize_geometry(self): """initialize the geometric information needed for the netcdf file """ - try: - import pyproj - except ImportError as e: - raise ImportError("NetCdf error importing pyproj module") from e + pyproj = import_optional_dependency("pyproj") from distutils.version import LooseVersion # Check if using newer pyproj version conventions @@ -763,6 +752,9 @@ def initialize_file(self, time_values=None): self.model.dis.perlen and self.start_datetime """ + from ..version import __version__ as version + from ..export.shapefile_utils import CRS + if self.nc is not None: raise Exception("nc file already initialized") @@ -770,11 +762,8 @@ def initialize_file(self, time_values=None): self.log("initializing geometry") self.initialize_geometry() self.log("initializing geometry") - try: - import netCDF4 - except ImportError as e: - self.logger.warn("error importing netCDF module") - raise ImportError("NetCdf error 
importing netCDF4 module") from e + + netCDF4 = import_optional_dependency("netCDF4") # open the file for writing try: @@ -787,7 +776,7 @@ def initialize_file(self, time_values=None): self.nc.setncattr( "Conventions", - f"CF-1.6, ACDD-1.3, flopy {flopy.__version__}", + f"CF-1.6, ACDD-1.3, flopy {version}", ) self.nc.setncattr( "date_created", datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%SZ") @@ -924,8 +913,9 @@ def initialize_file(self, time_values=None): y[:] = self.model_grid.xyzcellcenters[1] # grid mapping variable - crs = flopy.export.shapefile_utils.CRS( - prj=self.model_grid.prj, epsg=self.model_grid.epsg + crs = CRS( + prj=self.model_grid.prj, + epsg=self.model_grid.epsg, ) attribs = crs.grid_mapping_attribs if attribs is not None: diff --git a/flopy/export/shapefile_utils.py b/flopy/export/shapefile_utils.py index fd9bf58714..21ee609147 100755 --- a/flopy/export/shapefile_utils.py +++ b/flopy/export/shapefile_utils.py @@ -11,48 +11,13 @@ import warnings from ..datbase import DataType, DataInterface -from ..utils import Util3d +from ..utils import Util3d, import_optional_dependency + # web address of spatial reference dot org srefhttp = "https://spatialreference.org" -def import_shapefile(check_version=True): - """Import shapefile module from pyshp. - - Parameters - ---------- - check_version : bool - Checks to ensure that pyshp is at least version 2. Default True, - which is usually required for Writer (which has a different API), but - can be False if only using Reader. - - Returns - ------- - module - - Raises - ------ - ImportError - If shapefile module is not found, or major version is less than 2. - """ - try: - import shapefile - except ImportError: - raise ImportError( - inspect.getouterframes(inspect.currentframe())[1][3] - + ": error importing shapefile; try pip install pyshp" - ) - if check_version: - if int(shapefile.__version__.split(".")[0]) < 2: - raise ImportError( - inspect.getouterframes(inspect.currentframe())[1][3] - + ": shapefile version 2 or later required; try " - "pip install --upgrade pyshp" - ) - return shapefile - - def write_gridlines_shapefile(filename, mg): """ Write a polyline shapefile of the grid lines - a lightweight alternative @@ -69,7 +34,7 @@ def write_gridlines_shapefile(filename, mg): None """ - shapefile = import_shapefile() + shapefile = import_optional_dependency("shapefile") wr = shapefile.Writer(filename, shapeType=shapefile.POLYLINE) wr.field("number", "N", 18, 0) if mg.__class__.__name__ == "SpatialReference": @@ -120,7 +85,7 @@ def write_grid_shapefile( None """ - shapefile = import_shapefile() + shapefile = import_optional_dependency("shapefile") w = shapefile.Writer(filename, shapeType=shapefile.POLYGON) w.autoBalance = 1 @@ -532,7 +497,7 @@ def shp2recarray(shpname): """ from ..utils.geospatial_utils import GeoSpatialCollection - sf = import_shapefile(check_version=False) + sf = import_optional_dependency("shapefile") sfobj = sf.Reader(shpname) dtype = [ @@ -611,7 +576,7 @@ def recarray2shp( continue # set up for pyshp 2 - shapefile = import_shapefile() + shapefile = import_optional_dependency("shapefile") w = shapefile.Writer(shpname, shapeType=geomtype) w.autoBalance = 1 diff --git a/flopy/export/utils.py b/flopy/export/utils.py index ec3a859803..bf610082ef 100644 --- a/flopy/export/utils.py +++ b/flopy/export/utils.py @@ -14,6 +14,7 @@ from . import NetCdf, netcdf from . import shapefile_utils from . 
import vtk +from ..utils import import_optional_dependency NC_PRECISION_TYPE = { @@ -1510,6 +1511,9 @@ def export_array( a = a.copy() a[np.isnan(a)] = nodata if modelgrid.angrot != 0: + ndimage = import_optional_dependency("scipy.ndimage") + from ndimage import rotate + try: from scipy.ndimage import rotate except ImportError: @@ -1808,11 +1812,8 @@ def export_array_contours( **kwargs : keyword arguments to flopy.export.shapefile_utils.recarray2shp """ - try: - import matplotlib.pyplot as plt - except: - err_msg = "matplotlib must be installed to use export_array_contours()" - raise ImportError(err_msg) + import matplotlib.pyplot as plt + from ..utils import import_optional_dependency if epsg is None: epsg = modelgrid.epsg diff --git a/flopy/export/vtk.py b/flopy/export/vtk.py index 12f44f07e9..16696cab75 100644 --- a/flopy/export/vtk.py +++ b/flopy/export/vtk.py @@ -9,6 +9,8 @@ from flopy.datbase import DataType, DataInterface from flopy.utils import Util3d +from ..utils import import_optional_dependency + warnings.simplefilter("always", DeprecationWarning) @@ -121,11 +123,7 @@ def __init__( point_scalars=False, ): - try: - import vtk - except ImportError: - err = "vtk not installed, use pip install vtk" - raise ImportError(err) + vtk = import_optional_dependency("vtk") if model is None and modelgrid is None: raise AssertionError( diff --git a/flopy/mf6/utils/binaryfile_utils.py b/flopy/mf6/utils/binaryfile_utils.py index 596ff3ffe3..b97967a193 100644 --- a/flopy/mf6/utils/binaryfile_utils.py +++ b/flopy/mf6/utils/binaryfile_utils.py @@ -1,6 +1,7 @@ import os import numpy as np from ...utils import binaryfile as bf +from ...utils import import_optional_dependency class MFOutput: @@ -228,11 +229,8 @@ def _get_vertices(mfdict, key): elevations corresponding to a row column location """ - try: - import pandas as pd - except Exception as e: - msg = "MFOutputRequester._get_vertices(): requires pandas" - raise ImportError(msg) + extra = "MFOutputRequester._get_vertices() requires pandas." + pd = import_optional_dependency("pandas", extra=extra) mname = key[0] cellid = mfdict[(mname, "DISV8", "CELL2D", "cell2d_num")] diff --git a/flopy/mf6/utils/mfobservation.py b/flopy/mf6/utils/mfobservation.py index dc483441b2..a3869022c7 100644 --- a/flopy/mf6/utils/mfobservation.py +++ b/flopy/mf6/utils/mfobservation.py @@ -1,6 +1,8 @@ import numpy as np import csv +from ...utils import import_optional_dependency + def try_float(data): try: @@ -207,11 +209,8 @@ def get_dataframe( pd.DataFrame """ - try: - import pandas as pd - except Exception as e: - print("this feature requires pandas") - return None + extra = "get_dataframe() requires pandas." 
+ pd = import_optional_dependency("pandas", extra=extra) data_str = self._reader(self.Obsname) data = self._array_to_dict(data_str) diff --git a/flopy/modflow/mfsfr2.py b/flopy/modflow/mfsfr2.py index 5367f1e70a..cddb6e3513 100644 --- a/flopy/modflow/mfsfr2.py +++ b/flopy/modflow/mfsfr2.py @@ -10,11 +10,7 @@ from ..utils.flopy_io import line_parse from ..utils.recarray_utils import create_empty_recarray from ..utils.optionblock import OptionBlock - -try: - import pandas as pd -except: - pd = False +from ..utils import import_optional_dependency class ModflowSfr2(Package): @@ -650,11 +646,8 @@ def paths(self): @property def df(self): - if pd: - return pd.DataFrame(self.reach_data) - else: - msg = "ModflowSfr2.df: pandas not available" - raise ImportError(msg) + pd = import_optional_dependency("pandas") + return pd.DataFrame(self.reach_data) def _make_graph(self): # get all segments and their outseg @@ -1581,15 +1574,9 @@ def plot_path(self, start_seg=None, end_seg=0, plot_segment_lines=True): ------- ax : matplotlib.axes._subplots.AxesSubplot object """ - try: - import matplotlib.pyplot as plt - except: - raise ImportError( - "matplotlib must be installed to use ModflowSfr2.plot_path()" - ) - if not pd: - err_msg = "ModflowSfr2.plot_path: pandas not available" - raise ImportError(err_msg) + import matplotlib.pyplot as plt + + pd = import_optional_dependency("pandas") df = self.df m = self.parent diff --git a/flopy/plot/crosssection.py b/flopy/plot/crosssection.py index 49b213eda5..2c8cea4faf 100644 --- a/flopy/plot/crosssection.py +++ b/flopy/plot/crosssection.py @@ -1,14 +1,12 @@ import numpy as np -try: - import matplotlib.pyplot as plt - import matplotlib.colors - from matplotlib.patches import Polygon -except (ImportError, ModuleNotFoundError, RuntimeError): - plt = None - -from flopy.plot import plotutil -from flopy.utils import geometry +import matplotlib.pyplot as plt +import matplotlib.colors +from matplotlib.patches import Polygon + +from . import plotutil +from ..utils import geometry, import_optional_dependency + import copy import warnings diff --git a/flopy/plot/map.py b/flopy/plot/map.py index 0ea444003f..eeb7f6e3fe 100644 --- a/flopy/plot/map.py +++ b/flopy/plot/map.py @@ -1,14 +1,10 @@ import numpy as np -from ..discretization import StructuredGrid, UnstructuredGrid from ..utils import geometry -try: - import matplotlib.pyplot as plt - import matplotlib.colors - from matplotlib.collections import PathCollection, LineCollection - from matplotlib.path import Path -except (ImportError, ModuleNotFoundError, RuntimeError): - plt = None +import matplotlib.pyplot as plt +import matplotlib.colors +from matplotlib.collections import PathCollection, LineCollection +from matplotlib.path import Path from . 
import plotutil import warnings diff --git a/flopy/plot/plotutil.py b/flopy/plot/plotutil.py index fa1f43671e..1f8b721db2 100644 --- a/flopy/plot/plotutil.py +++ b/flopy/plot/plotutil.py @@ -7,19 +7,10 @@ import os import numpy as np import warnings -from ..utils import Util3d +import matplotlib.pyplot as plt +from ..utils import Util3d, import_optional_dependency from ..datbase import DataType, DataInterface -try: - import shapefile -except ImportError: - shapefile = None - -try: - import matplotlib.pyplot as plt -except (ImportError, RuntimeError): - plt = None - warnings.simplefilter("ignore", RuntimeWarning) bc_color_dict = { @@ -1107,13 +1098,6 @@ def _plot_array_helper( "modelgrid": None, } - # check that matplotlib is installed - if plt is None: - raise ImportError( - "Could not import matplotlib. Must install matplotlib " - "in order to plot LayerFile data." - ) - for key in defaults: if key in kwargs: defaults[key] = kwargs.pop(key) @@ -1259,12 +1243,6 @@ def _plot_bc_helper( from .map import PlotMapView - if plt is None: - raise ImportError( - "Could not import matplotlib. Must install matplotlib " - "in order to plot boundary condition data." - ) - defaults = { "figsize": None, "inactive": True, @@ -2020,11 +1998,7 @@ def shapefile_extents(shp): >>> extent = flopy.plot.plotutil.shapefile_extents(fshp) """ - if shapefile is None: - raise ImportError( - "Could not import shapefile. " - "Must install pyshp in order to plot shapefiles." - ) + shapefile = import_optional_dependency("shapefile") sf = shapefile.Reader(shp) shapes = sf.shapes() @@ -2064,11 +2038,7 @@ def shapefile_get_vertices(shp): >>> lines = flopy.plot.plotutil.shapefile_get_vertices(fshp) """ - if shapefile is None: - raise ImportError( - "Could not import shapefile. " - "Must install pyshp in order to plot shapefiles." - ) + shapefile = import_optional_dependency("shapefile") sf = shapefile.Reader(shp) shapes = sf.shapes() @@ -2117,22 +2087,12 @@ def shapefile_to_patch_collection(shp, radius=500.0, idx=None): Patch collection of shapes in the shapefile """ - if shapefile is None: - raise ImportError( - "Could not import shapefile. " - "Must install pyshp in order to plot shapefiles." - ) - if plt is None: - raise ImportError( - "matplotlib must be installed to " - "use shapefile_to_patch_collection()" - ) - else: - from matplotlib.patches import Polygon, Circle, PathPatch - import matplotlib.path as MPath - from matplotlib.collections import PatchCollection - from ..utils.geospatial_utils import GeoSpatialCollection - from ..utils.geometry import point_in_polygon + from matplotlib.patches import Polygon, Circle, PathPatch + import matplotlib.path as MPath + from matplotlib.collections import PatchCollection + + from ..utils.geospatial_utils import GeoSpatialCollection + from ..utils.geometry import point_in_polygon geofeats = GeoSpatialCollection(shp) shapes = geofeats.shape @@ -2264,13 +2224,6 @@ def plot_shapefile( -------- """ - - if shapefile is None: - raise ImportError( - "Could not import shapefile. " - "Must install pyshp in order to plot shapefiles." 
- ) - vmin = kwargs.pop("vmin", None) vmax = kwargs.pop("vmax", None) @@ -2400,9 +2353,6 @@ def plot_cvfd( "Use PlotMapView for plotting", DeprecationWarning, ) - if plt is None: - err_msg = "matplotlib must be installed to use plot_cvfd()" - raise ImportError(err_msg) if "vmin" in kwargs: vmin = kwargs.pop("vmin") diff --git a/flopy/plot/styles.py b/flopy/plot/styles.py index d402cf6e12..4480a85c5d 100644 --- a/flopy/plot/styles.py +++ b/flopy/plot/styles.py @@ -1,11 +1,7 @@ -try: - import matplotlib.pyplot as plt - import matplotlib as mpl -except (ImportError, ModuleNotFoundError, RuntimeError): - plt = None - import os import platform +import matplotlib as mpl +import matplotlib.pyplot as plt class styles: @@ -53,6 +49,7 @@ def set_font_type(cls, family, fontname): ------- None """ + mpl = import_optional_dependency("matplotlib") mpl.rcParams["font.family"] = family mpl.rcParams[f"font.{family}"] = fontname return mpl.rcParams @@ -435,6 +432,8 @@ def __set_fontspec(cls, bold=True, italic=True, fontsize=9, family=False): ------- dict """ + mpl = import_optional_dependency("matplotlib") + family = mpl.rcParams["font.family"][0] font = mpl.rcParams[f"font.{family}"][0] diff --git a/flopy/utils/flopy_io.py b/flopy/utils/flopy_io.py index eb96d09b46..0fefeec450 100755 --- a/flopy/utils/flopy_io.py +++ b/flopy/utils/flopy_io.py @@ -4,12 +4,7 @@ import os import sys import numpy as np - -try: - import pandas as pd -except: - pd = False - +from ..utils import import_optional_dependency def _fmt_string(array, float_format="{}"): """ @@ -353,14 +348,14 @@ def loadtxt( """ # test if pandas should be used, if available if use_pandas: - if pd: - if delimiter.isspace(): - kwargs["delim_whitespace"] = True - if isinstance(dtype, np.dtype) and "names" not in kwargs: - kwargs["names"] = dtype.names + pd = import_optional_dependency("pandas") + if delimiter.isspace(): + kwargs["delim_whitespace"] = True + if isinstance(dtype, np.dtype) and "names" not in kwargs: + kwargs["names"] = dtype.names # if use_pandas and pd then use pandas - if use_pandas and pd: + if use_pandas: df = pd.read_csv(file, dtype=dtype, skiprows=skiprows, **kwargs) return df.to_records(index=False) # default use of numpy diff --git a/flopy/utils/geometry.py b/flopy/utils/geometry.py index 7f3eca1706..e095a51897 100644 --- a/flopy/utils/geometry.py +++ b/flopy/utils/geometry.py @@ -3,6 +3,8 @@ """ import numpy as np +from ..utils import import_optional_dependency + class Shape: """ @@ -382,12 +384,10 @@ def bounds(self): @property def pyshp_parts(self): - from ..export.shapefile_utils import import_shapefile - # exterior ring must be clockwise (negative area) # interiors rings must be counter-clockwise (positive area) - shapefile = import_shapefile() + shapefile = import_optional_dependency("shapefile") exterior = list(self.exterior) if shapefile.signed_area(exterior) > 0: @@ -410,12 +410,9 @@ def patch(self): return self.get_patch() def get_patch(self, **kwargs): - try: - from descartes import PolygonPatch - except ImportError: - print( - 'This feature requires descartes.\nTry "pip install descartes"' - ) + descartes = import_optional_dependency("descartes") + from descartes import PolygonPatch + return PolygonPatch(self.geojson, **kwargs) def plot(self, ax=None, **kwargs): @@ -427,10 +424,7 @@ def plot(self, ax=None, **kwargs): Accepts keyword arguments to descartes.PolygonPatch. Requires the descartes package (pip install descartes). 
""" - try: - import matplotlib.pyplot as plt - except ImportError: - print("This feature requires matplotlib.") + import matplotlib.pyplot as plt if ax is None: ax = plt.gca() @@ -522,10 +516,7 @@ def pyshp_parts(self): return [self.coords] def plot(self, ax=None, **kwargs): - try: - import matplotlib.pyplot as plt - except ImportError: - print("This feature requires matplotlib.") + import matplotlib.pyplot as plt if ax is None: ax = plt.gca() @@ -621,10 +612,7 @@ def pyshp_parts(self): return self.coords def plot(self, ax=None, **kwargs): - try: - import matplotlib.pyplot as plt - except ImportError: - print("This feature requires matplotlib.") + import matplotlib.pyplot as plt if ax is None: ax = plt.gca() diff --git a/flopy/utils/geospatial_utils.py b/flopy/utils/geospatial_utils.py index d0ce58b62d..d6ff9efec0 100644 --- a/flopy/utils/geospatial_utils.py +++ b/flopy/utils/geospatial_utils.py @@ -1,5 +1,10 @@ -try: - import shapely +import numpy as np + +from ..utils.geometry import Shape, Collection +from ..utils import import_optional_dependency + +shapely = import_optional_dependency("shapely", errors="ignore") +if shapely is not None: from shapely.geometry import ( MultiPolygon, Polygon, @@ -8,18 +13,8 @@ LineString, MultiLineString, ) -except: - shapely = None - -try: - import geojson -except: - geojson = None - -import numpy as np -from flopy.utils.geometry import Shape, Collection - +geojson = import_optional_dependency("geojson", errors="ignore") geojson_classes = {} if geojson is not None: geojson_classes = { @@ -62,9 +57,9 @@ class GeoSpatialUtil: """ def __init__(self, obj, shapetype=None): - from ..export.shapefile_utils import import_shapefile - - self.__shapefile = import_shapefile() + self.__shapefile = import_optional_dependency( + "shapefile", errors="ignore" + ) self.__obj = obj self.__geo_interface = {} self._geojson = None @@ -265,9 +260,10 @@ class GeoSpatialCollection: """ def __init__(self, obj, shapetype=None): - from ..export.shapefile_utils import import_shapefile - self.__shapefile = import_shapefile() + self.__shapefile = import_optional_dependency( + "shapefile", errors="ignore" + ) self.__obj = obj self.__collection = [] self._geojson = None diff --git a/flopy/utils/gridgen.py b/flopy/utils/gridgen.py index d20f198ff6..1f3726639c 100644 --- a/flopy/utils/gridgen.py +++ b/flopy/utils/gridgen.py @@ -6,7 +6,8 @@ from ..modflow.mfdisu import ModflowDisU from ..mf6.modflow import ModflowGwfdis from .util_array import Util2d # read1d, -from ..export.shapefile_utils import import_shapefile, shp2recarray +from ..utils import import_optional_dependency +from ..export.shapefile_utils import shp2recarray from ..mbase import which @@ -59,7 +60,7 @@ def features_to_shapefile(features, featuretype, filename): """ from .geospatial_utils import GeoSpatialCollection - shapefile = import_shapefile(check_version=True) + shapefile = import_optional_dependency("shapefile") if featuretype.lower() == "line": featuretype = "LineString" @@ -680,12 +681,7 @@ def plot( pc : matplotlib.collections.PatchCollection """ - try: - import matplotlib.pyplot as plt - except: - err_msg = "matplotlib must be installed to use gridgen.plot()" - raise ImportError(err_msg) - + import matplotlib.pyplot as plt from ..plot import plot_shapefile, shapefile_extents if ax is None: @@ -1920,7 +1916,8 @@ def _mkvertdict(self): None """ - shapefile = import_shapefile(check_version=False) + shapefile = import_optional_dependency("shapefile") + # ensure there are active leaf cells from gridgen fname = 
os.path.join(self.model_ws, "qtg.nod") if not os.path.isfile(fname): diff --git a/flopy/utils/gridintersect.py b/flopy/utils/gridintersect.py index af6651b34e..f1ff8813e9 100644 --- a/flopy/utils/gridintersect.py +++ b/flopy/utils/gridintersect.py @@ -1,14 +1,12 @@ import numpy as np -try: - import matplotlib.pyplot as plt -except (ImportError, RuntimeError): - plt = None +from .utl_import import import_optional_dependency from .geometry import transform from .geospatial_utils import GeoSpatialUtil -try: +shapely = import_optional_dependency("shapely", errors="ignore") +if shapely is not None: from shapely.geometry import ( MultiPoint, Point, @@ -21,30 +19,24 @@ from shapely.affinity import translate, rotate from shapely.prepared import prep - shply = True -except: - shply = False - import contextlib import warnings from distutils.version import LooseVersion NUMPY_GE_121 = str(np.__version__) >= LooseVersion("1.21") -try: - import shapely - +if shapely is not None: SHAPELY_GE_20 = str(shapely.__version__) >= LooseVersion("2.0") SHAPELY_LT_18 = str(shapely.__version__) < LooseVersion("1.8") -except ImportError: - shapely = None +else: SHAPELY_GE_20 = False SHAPELY_LT_18 = False -try: - from shapely.errors import ShapelyDeprecationWarning as shapely_warning -except ImportError: - shapely_warning = None +if shapely is not None: + try: + from shapely.errors import ShapelyDeprecationWarning as shapely_warning + except ImportError: + shapely_warning = None if shapely_warning is not None and not SHAPELY_GE_20: @@ -173,13 +165,7 @@ def __init__(self, mfgrid, method=None, rtree=True): loop through all model gridcells (which is generally slower). Only read when `method='vertex'`. """ - if not shply: - msg = ( - "Shapely is needed for grid intersect operations! " - "Please install shapely if you need to use grid intersect " - "functionality." 
- ) - raise ModuleNotFoundError(msg) + import_optional_dependency("shapely") self.mfgrid = mfgrid if method is None: @@ -1481,20 +1467,11 @@ def plot_polygon(rec, ax=None, **kwargs): ax: matplotlib.pyplot.axes returns the axes handle """ - try: - from descartes import PolygonPatch - except ImportError: - msg = "descartes package needed for plotting polygons" - if plt is None: - msg = ( - "matplotlib and descartes packages needed for " - "plotting polygons" - ) - raise ImportError(msg) - if plt is None: - msg = "matplotlib package needed for plotting polygons" - raise ImportError(msg) + import matplotlib.pyplot as plt + + import_optional_dependency("descartes") + from descartes import PolygonPatch if ax is None: _, ax = plt.subplots() @@ -1533,9 +1510,7 @@ def plot_linestring(rec, ax=None, cmap=None, **kwargs): ax: matplotlib.pyplot.axes returns the axes handle """ - if plt is None: - msg = "matplotlib package needed for plotting polygons" - raise ImportError(msg) + import matplotlib.pyplot as plt if ax is None: _, ax = plt.subplots() @@ -1587,9 +1562,7 @@ def plot_point(rec, ax=None, **kwargs): ax: matplotlib.pyplot.axes returns the axes handle """ - if plt is None: - msg = "matplotlib package needed for plotting polygons" - raise ImportError(msg) + import matplotlib.pyplot as plt if ax is None: _, ax = plt.subplots() diff --git a/flopy/utils/mflistfile.py b/flopy/utils/mflistfile.py index 3af271a484..e982c68eb4 100644 --- a/flopy/utils/mflistfile.py +++ b/flopy/utils/mflistfile.py @@ -12,6 +12,7 @@ from ..utils.utils_def import totim_to_datetime from ..utils.flopy_io import get_ts_sp +from ..utils import import_optional_dependency class ListBudget: @@ -474,11 +475,8 @@ def get_dataframes(self, start_datetime="1-1-1970", diff=False): """ - try: - import pandas as pd - except Exception as e: - msg = f"ListBudget.get_dataframe(): requires pandas: {e!s}" - raise ImportError(msg) + extra = "ListBudget.get_dataframes() requires pandas." + pd = import_optional_dependency("pandas", extra=extra) if not self._isvalid: return None diff --git a/flopy/utils/mtlistfile.py b/flopy/utils/mtlistfile.py index 31cb63ad90..1d1232ccfc 100644 --- a/flopy/utils/mtlistfile.py +++ b/flopy/utils/mtlistfile.py @@ -3,14 +3,10 @@ mt3d(usgs) run. Also includes support for SFT budget. """ -import os -import sys import warnings -from datetime import timedelta import numpy as np -from ..utils.utils_def import totim_to_datetime - +from ..utils import import_optional_dependency class MtListBudget: """ @@ -78,10 +74,8 @@ def parse( (optionally) surface-water mass budget. If the SFT process is not used, df_sw is None. """ - try: - import pandas as pd - except: - raise ImportError("MtListBudget.parse: pandas not available") + extra = "MtListBudget.parse() requires pandas." + pd = import_optional_dependency("pandas", extra=extra) self.gw_data = {} self.sw_data = {} @@ -188,10 +182,8 @@ def parse( return df_gw, df_sw def _diff(self, df): - try: - import pandas as pd - except: - raise ImportError("MtListBudget._diff: pandas not available") + extra = "MtListBudget._diff() requires pandas." 
+ pd = import_optional_dependency("pandas", extra=extra) out_cols = [ c for c in df.columns if "_out" in c and not c.startswith("net_") diff --git a/flopy/utils/observationfile.py b/flopy/utils/observationfile.py index 85a9172e48..5c9081f8ff 100644 --- a/flopy/utils/observationfile.py +++ b/flopy/utils/observationfile.py @@ -2,6 +2,7 @@ import io from ..utils.utils_def import FlopyBinaryData from ..utils.flopy_io import get_ts_sp +from ..utils import import_optional_dependency class ObsFiles(FlopyBinaryData): @@ -174,12 +175,9 @@ def get_dataframe( """ - try: - import pandas as pd - from ..utils.utils_def import totim_to_datetime - except Exception as e: - msg = f"ObsFiles.get_dataframe() error import pandas: {e!s}" - raise ImportError(msg) + from ..utils.utils_def import totim_to_datetime + extra = "ObsFiles.get_dataframe() requires pandas." + pd = import_optional_dependency("pandas", extra=extra) i0 = 0 i1 = self.data.shape[0] diff --git a/flopy/utils/parse_version.py b/flopy/utils/parse_version.py new file mode 100644 index 0000000000..45e1b8e6cd --- /dev/null +++ b/flopy/utils/parse_version.py @@ -0,0 +1,602 @@ +# Vendored from https://github.com/pypa/packaging/blob/main/packaging/_structures.py +# and https://github.com/pypa/packaging/blob/main/packaging/_structures.py +# changeset ae891fd74d6dd4c6063bb04f2faeadaac6fc6313 +# 04/30/2021 + +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. +from __future__ import annotations + +import collections +import itertools +import re +from typing import ( + Callable, + Iterator, + SupportsInt, + Tuple, + Union, +) +import warnings + +__all__ = [ + "parse", + "Version", + "LegacyVersion", + "InvalidVersion", + "VERSION_PATTERN", +] + + +class InfinityType: + def __repr__(self) -> str: + return "Infinity" + + def __hash__(self) -> int: + return hash(repr(self)) + + def __lt__(self, other: object) -> bool: + return False + + def __le__(self, other: object) -> bool: + return False + + def __eq__(self, other: object) -> bool: + return isinstance(other, type(self)) + + def __ne__(self, other: object) -> bool: + return not isinstance(other, type(self)) + + def __gt__(self, other: object) -> bool: + return True + + def __ge__(self, other: object) -> bool: + return True + + def __neg__(self: object) -> NegativeInfinityType: + return NegativeInfinity + + +Infinity = InfinityType() + + +class NegativeInfinityType: + def __repr__(self) -> str: + return "-Infinity" + + def __hash__(self) -> int: + return hash(repr(self)) + + def __lt__(self, other: object) -> bool: + return True + + def __le__(self, other: object) -> bool: + return True + + def __eq__(self, other: object) -> bool: + return isinstance(other, type(self)) + + def __ne__(self, other: object) -> bool: + return not isinstance(other, type(self)) + + def __gt__(self, other: object) -> bool: + return False + + def __ge__(self, other: object) -> bool: + return False + + def __neg__(self: object) -> InfinityType: + return Infinity + + +NegativeInfinity = NegativeInfinityType() + + +InfiniteTypes = Union[InfinityType, NegativeInfinityType] +PrePostDevType = Union[InfiniteTypes, Tuple[str, int]] +SubLocalType = Union[InfiniteTypes, int, str] +LocalType = Union[ + NegativeInfinityType, + Tuple[ + Union[ + SubLocalType, + Tuple[SubLocalType, str], + Tuple[NegativeInfinityType, SubLocalType], + ], + ..., + ], +] +CmpKey = Tuple[ + int, + Tuple[int, ...], + PrePostDevType, + 
PrePostDevType, + PrePostDevType, + LocalType, +] +LegacyCmpKey = Tuple[int, Tuple[str, ...]] +VersionComparisonMethod = Callable[ + [Union[CmpKey, LegacyCmpKey], Union[CmpKey, LegacyCmpKey]], bool +] + +_Version = collections.namedtuple( + "_Version", ["epoch", "release", "dev", "pre", "post", "local"] +) + + +def parse(version: str) -> LegacyVersion | Version: + """ + Parse the given version string and return either a :class:`Version` object + or a :class:`LegacyVersion` object depending on if the given version is + a valid PEP 440 version or a legacy version. + """ + try: + return Version(version) + except InvalidVersion: + return LegacyVersion(version) + + +class InvalidVersion(ValueError): + """ + An invalid version was found, users should refer to PEP 440. + """ + + +class _BaseVersion: + _key: CmpKey | LegacyCmpKey + + def __hash__(self) -> int: + return hash(self._key) + + # Please keep the duplicated `isinstance` check + # in the six comparisons hereunder + # unless you find a way to avoid adding overhead function calls. + def __lt__(self, other: _BaseVersion) -> bool: + if not isinstance(other, _BaseVersion): + return NotImplemented + + return self._key < other._key + + def __le__(self, other: _BaseVersion) -> bool: + if not isinstance(other, _BaseVersion): + return NotImplemented + + return self._key <= other._key + + def __eq__(self, other: object) -> bool: + if not isinstance(other, _BaseVersion): + return NotImplemented + + return self._key == other._key + + def __ge__(self, other: _BaseVersion) -> bool: + if not isinstance(other, _BaseVersion): + return NotImplemented + + return self._key >= other._key + + def __gt__(self, other: _BaseVersion) -> bool: + if not isinstance(other, _BaseVersion): + return NotImplemented + + return self._key > other._key + + def __ne__(self, other: object) -> bool: + if not isinstance(other, _BaseVersion): + return NotImplemented + + return self._key != other._key + + +class LegacyVersion(_BaseVersion): + def __init__(self, version: str) -> None: + self._version = str(version) + self._key = _legacy_cmpkey(self._version) + + warnings.warn( + "Creating a LegacyVersion has been deprecated and will be " + "removed in the next major release.", + DeprecationWarning, + ) + + def __str__(self) -> str: + return self._version + + def __repr__(self) -> str: + return f"" + + @property + def public(self) -> str: + return self._version + + @property + def base_version(self) -> str: + return self._version + + @property + def epoch(self) -> int: + return -1 + + @property + def release(self) -> None: + return None + + @property + def pre(self) -> None: + return None + + @property + def post(self) -> None: + return None + + @property + def dev(self) -> None: + return None + + @property + def local(self) -> None: + return None + + @property + def is_prerelease(self) -> bool: + return False + + @property + def is_postrelease(self) -> bool: + return False + + @property + def is_devrelease(self) -> bool: + return False + + +_legacy_version_component_re = re.compile( + r"(\d+ | [a-z]+ | \.| -)", re.VERBOSE +) + +_legacy_version_replacement_map = { + "pre": "c", + "preview": "c", + "-": "final-", + "rc": "c", + "dev": "@", +} + + +def _parse_version_parts(s: str) -> Iterator[str]: + for part in _legacy_version_component_re.split(s): + part = _legacy_version_replacement_map.get(part, part) + + if not part or part == ".": + continue + + if part[:1] in "0123456789": + # pad for numeric comparison + yield part.zfill(8) + else: + yield "*" + part + + # ensure that 
alpha/beta/candidate are before final + yield "*final" + + +def _legacy_cmpkey(version: str) -> LegacyCmpKey: + + # We hardcode an epoch of -1 here. A PEP 440 version can only have a epoch + # greater than or equal to 0. This will effectively put the LegacyVersion, + # which uses the defacto standard originally implemented by setuptools, + # as before all PEP 440 versions. + epoch = -1 + + # This scheme is taken from pkg_resources.parse_version setuptools prior to + # it's adoption of the packaging library. + parts: list[str] = [] + for part in _parse_version_parts(version.lower()): + if part.startswith("*"): + # remove "-" before a prerelease tag + if part < "*final": + while parts and parts[-1] == "*final-": + parts.pop() + + # remove trailing zeros from each series of numeric parts + while parts and parts[-1] == "00000000": + parts.pop() + + parts.append(part) + + return epoch, tuple(parts) + + +# Deliberately not anchored to the start and end of the string, to make it +# easier for 3rd party code to reuse +VERSION_PATTERN = r""" + v? + (?: + (?:(?P[0-9]+)!)? # epoch + (?P[0-9]+(?:\.[0-9]+)*) # release segment + (?P
<pre>                                          # pre-release
+            [-_\.]?
+            (?P<pre_l>(a|b|c|rc|alpha|beta|pre|preview))
+            [-_\.]?
+            (?P<pre_n>[0-9]+)?
+        )?
+        (?P<post>                                         # post release
+            (?:-(?P<post_n1>[0-9]+))
+            |
+            (?:
+                [-_\.]?
+                (?P<post_l>post|rev|r)
+                [-_\.]?
+                (?P<post_n2>[0-9]+)?
+            )
+        )?
+        (?P<dev>                                          # dev release
+            [-_\.]?
+            (?P<dev_l>dev)
+            [-_\.]?
+            (?P<dev_n>[0-9]+)?
+        )?
+    )
+    (?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))?       # local version
+"""
+
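+# Illustrative only: a minimal sketch of how this pattern is meant to behave
+# when compiled the way the Version class below compiles it; the example
+# version strings are assumptions for illustration, not taken from this patch.
+#
+#     _demo_re = re.compile(
+#         r"^\s*" + VERSION_PATTERN + r"\s*$", re.VERBOSE | re.IGNORECASE
+#     )
+#     assert _demo_re.match("3.6.1") is not None        # plain release
+#     assert _demo_re.match("2.1rc3") is not None       # release + pre-release
+#     assert _demo_re.match("not-a-version") is None    # rejected outright
+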
+
+class Version(_BaseVersion):
+
+    _regex = re.compile(
+        r"^\s*" + VERSION_PATTERN + r"\s*$", re.VERBOSE | re.IGNORECASE
+    )
+
+    def __init__(self, version: str) -> None:
+
+        # Validate the version and parse it into pieces
+        match = self._regex.search(version)
+        if not match:
+            raise InvalidVersion(f"Invalid version: '{version}'")
+
+        # Store the parsed out pieces of the version
+        self._version = _Version(
+            epoch=int(match.group("epoch")) if match.group("epoch") else 0,
+            release=tuple(int(i) for i in match.group("release").split(".")),
+            pre=_parse_letter_version(
+                match.group("pre_l"), match.group("pre_n")
+            ),
+            post=_parse_letter_version(
+                match.group("post_l"),
+                match.group("post_n1") or match.group("post_n2"),
+            ),
+            dev=_parse_letter_version(
+                match.group("dev_l"), match.group("dev_n")
+            ),
+            local=_parse_local_version(match.group("local")),
+        )
+
+        # Generate a key which will be used for sorting
+        self._key = _cmpkey(
+            self._version.epoch,
+            self._version.release,
+            self._version.pre,
+            self._version.post,
+            self._version.dev,
+            self._version.local,
+        )
+
+    def __repr__(self) -> str:
+        return f"<Version('{self}')>"
+
+    def __str__(self) -> str:
+        parts = []
+
+        # Epoch
+        if self.epoch != 0:
+            parts.append(f"{self.epoch}!")
+
+        # Release segment
+        parts.append(".".join([str(x) for x in self.release]))
+
+        # Pre-release
+        if self.pre is not None:
+            parts.append("".join([str(x) for x in self.pre]))
+
+        # Post-release
+        if self.post is not None:
+            parts.append(f".post{self.post}")
+
+        # Development release
+        if self.dev is not None:
+            parts.append(f".dev{self.dev}")
+
+        # Local version segment
+        if self.local is not None:
+            parts.append(f"+{self.local}")
+
+        return "".join(parts)
+
+    @property
+    def epoch(self) -> int:
+        _epoch: int = self._version.epoch
+        return _epoch
+
+    @property
+    def release(self) -> tuple[int, ...]:
+        _release: tuple[int, ...] = self._version.release
+        return _release
+
+    @property
+    def pre(self) -> tuple[str, int] | None:
+        _pre: tuple[str, int] | None = self._version.pre
+        return _pre
+
+    @property
+    def post(self) -> int | None:
+        return self._version.post[1] if self._version.post else None
+
+    @property
+    def dev(self) -> int | None:
+        return self._version.dev[1] if self._version.dev else None
+
+    @property
+    def local(self) -> str | None:
+        if self._version.local:
+            return ".".join([str(x) for x in self._version.local])
+        else:
+            return None
+
+    @property
+    def public(self) -> str:
+        return str(self).split("+", 1)[0]
+
+    @property
+    def base_version(self) -> str:
+        parts = []
+
+        # Epoch
+        if self.epoch != 0:
+            parts.append(f"{self.epoch}!")
+
+        # Release segment
+        parts.append(".".join([str(x) for x in self.release]))
+
+        return "".join(parts)
+
+    @property
+    def is_prerelease(self) -> bool:
+        return self.dev is not None or self.pre is not None
+
+    @property
+    def is_postrelease(self) -> bool:
+        return self.post is not None
+
+    @property
+    def is_devrelease(self) -> bool:
+        return self.dev is not None
+
+    @property
+    def major(self) -> int:
+        return self.release[0] if len(self.release) >= 1 else 0
+
+    @property
+    def minor(self) -> int:
+        return self.release[1] if len(self.release) >= 2 else 0
+
+    @property
+    def micro(self) -> int:
+        return self.release[2] if len(self.release) >= 3 else 0
+
+
+def _parse_letter_version(
+    letter: str, number: str | bytes | SupportsInt
+) -> tuple[str, int] | None:
+
+    if letter:
+        # We consider there to be an implicit 0 in a pre-release if there is
+        # not a numeral associated with it.
+        if number is None:
+            number = 0
+
+        # We normalize any letters to their lower case form
+        letter = letter.lower()
+
+        # We consider some words to be alternate spellings of other words and
+        # in those cases we want to normalize the spellings to our preferred
+        # spelling.
+        if letter == "alpha":
+            letter = "a"
+        elif letter == "beta":
+            letter = "b"
+        elif letter in ["c", "pre", "preview"]:
+            letter = "rc"
+        elif letter in ["rev", "r"]:
+            letter = "post"
+
+        return letter, int(number)
+    if not letter and number:
+        # We assume if we are given a number, but we are not given a letter
+        # then this is using the implicit post release syntax (e.g. 1.0-1)
+        letter = "post"
+
+        return letter, int(number)
+
+    return None
+
+
+_local_version_separators = re.compile(r"[\._-]")
+
+
+def _parse_local_version(local: str) -> LocalType | None:
+    """
+    Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
+    """
+    if local is not None:
+        return tuple(
+            part.lower() if not part.isdigit() else int(part)
+            for part in _local_version_separators.split(local)
+        )
+    return None
+
+
+def _cmpkey(
+    epoch: int,
+    release: tuple[int, ...],
+    pre: tuple[str, int] | None,
+    post: tuple[str, int] | None,
+    dev: tuple[str, int] | None,
+    local: tuple[SubLocalType] | None,
+) -> CmpKey:
+
+    # When we compare a release version, we want to compare it with all of the
+    # trailing zeros removed. So we'll reverse the list, drop all of the now
+    # leading zeros until we come to something non-zero, take the rest,
+    # re-reverse it back into the correct order, make it a tuple, and use
+    # that for our sorting key.
+    _release = tuple(
+        reversed(
+            list(itertools.dropwhile(lambda x: x == 0, reversed(release)))
+        )
+    )
+
+    # We need to "trick" the sorting algorithm to put 1.0.dev0 before 1.0a0.
+    # We'll do this by abusing the pre segment, but we _only_ want to do this
+    # if there is not a pre or a post segment. If we have one of those then
+    # the normal sorting rules will handle this case correctly.
+    if pre is None and post is None and dev is not None:
+        _pre: PrePostDevType = NegativeInfinity
+    # Versions without a pre-release (except as noted above) should sort after
+    # those with one.
+    elif pre is None:
+        _pre = Infinity
+    else:
+        _pre = pre
+
+    # Versions without a post segment should sort before those with one.
+    if post is None:
+        _post: PrePostDevType = NegativeInfinity
+
+    else:
+        _post = post
+
+    # Versions without a development segment should sort after those with one.
+    if dev is None:
+        _dev: PrePostDevType = Infinity
+
+    else:
+        _dev = dev
+
+    if local is None:
+        # Versions without a local segment should sort before those with one.
+        _local: LocalType = NegativeInfinity
+    else:
+        # Versions with a local segment need that segment parsed to implement
+        # the sorting rules in PEP440.
+        # - Alpha numeric segments sort before numeric segments
+        # - Alpha numeric segments sort lexicographically
+        # - Numeric segments sort numerically
+        # - Shorter versions sort before longer versions when the prefixes
+        #   match exactly
+        _local = tuple(
+            (i, "") if isinstance(i, int) else (NegativeInfinity, i)
+            for i in local
+        )
+
+    return epoch, _release, _pre, _post, _dev, _local
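
A minimal sketch (not part of the patch) of the ordering this key produces,
assuming the rest of the vendored module matches the upstream packaging
implementation it was copied from:

    from flopy.utils.parse_version import Version

    # dev releases sort first, then pre-releases, the final release, a local
    # variant of it, and finally post releases
    versions = ["1.0.post1", "1.0", "1.0rc1", "1.0.dev0", "1.0a1", "1.0+local"]
    print([str(v) for v in sorted(Version(v) for v in versions)])
    # ['1.0.dev0', '1.0a1', '1.0rc1', '1.0', '1.0+local', '1.0.post1']
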
diff --git a/flopy/utils/rasters.py b/flopy/utils/rasters.py
index de7dc6ab64..e5b39662e4 100644
--- a/flopy/utils/rasters.py
+++ b/flopy/utils/rasters.py
@@ -59,7 +59,9 @@ def __init__(
     ):
         from .geometry import point_in_polygon
 
-        self._rasterio = import_optional_dependency("rasterio")
+        rasterio = import_optional_dependency("rasterio")
+        from rasterio.crs import CRS
+
         self._affine = import_optional_dependency("affine")
 
         self._point_in_polygon = point_in_polygon
@@ -86,12 +88,12 @@ def __init__(
 
         meta["dtype"] = dtype
 
-        if isinstance(crs, self._rasterio.crs.CRS):
+        if isinstance(crs, CRS):
             pass
         elif isinstance(crs, int):
-            crs = self._rasterio.crs.CRS.from_epsg(crs)
+            crs = CRS.from_epsg(crs)
         elif isinstance(crs, str):
-            crs = self._rasterio.crs.CRS.from_string(crs)
+            crs = CRS.from_string(crs)
         else:
             TypeError("crs type not understood, provide an epsg or proj4")
 
@@ -116,7 +118,7 @@ def __init__(
         self.__xcenters = None
         self.__ycenters = None
 
-        if isinstance(rio_ds, self._rasterio.io.DatasetReader):
+        if isinstance(rio_ds, rasterio.io.DatasetReader):
             self._dataset = rio_ds
 
     @property
@@ -662,6 +664,9 @@ def _sample_rio_dataset(self, polygon, invert):
             tuple : (arr_dict, raster_crp_meta)
 
         """
+        import_optional_dependency("rasterio")
+        from rasterio.mask import mask
+
         from .geospatial_utils import GeoSpatialUtil
 
         if isinstance(polygon, (list, tuple, np.ndarray)):
@@ -670,7 +675,7 @@ def _sample_rio_dataset(self, polygon, invert):
         geom = GeoSpatialUtil(polygon, shapetype="Polygon")
         shapes = [geom]
 
-        rstr_crp, rstr_crp_affine = self._rasterio.mask.mask(
+        rstr_crp, rstr_crp_affine = mask(
             self._dataset, shapes, crop=True, invert=invert
         )
 
@@ -778,11 +783,12 @@ def write(self, name):
             output raster .tif file name
 
         """
+        rasterio = import_optional_dependency("rasterio")
 
         if not name.endswith(".tif"):
             name += ".tif"
 
-        with self._rasterio.open(name, "w", **self._meta) as foo:
+        with rasterio.open(name, "w", **self._meta) as foo:
             for band, arr in self.__arr_dict.items():
                 foo.write(arr, band)
 
@@ -838,8 +844,11 @@ def plot(self, ax=None, contour=False, **kwargs):
             ax : matplotlib.pyplot.axes
 
         """
+        import_optional_dependency("rasterio")
+        from rasterio.plot import show
+
         if self._dataset is not None:
-            ax = self._rasterio.plot.show(
+            ax = show(
                 self._dataset,
                 ax=ax,
                 contour=contour,
@@ -862,7 +871,7 @@ def plot(self, ax=None, contour=False, **kwargs):
                 i += 1
 
             data = np.ma.masked_where(data == self.nodatavals, data)
-            ax = self._rasterio.plot.show(
+            ax = show(
                 data,
                 ax=ax,
                 contour=contour,
@@ -891,12 +900,14 @@ def histogram(self, ax=None, **kwargs):
             ax : matplotlib.pyplot.axes
 
         """
+        import_optional_dependency("rasterio")
+        from rasterio.plot import show_hist
 
         if "alpha" not in kwargs:
             kwargs["alpha"] = 0.3
 
         if self._dataset is not None:
-            ax = self._rasterio.plot.show_hist(self._dataset, ax=ax, **kwargs)
+            ax = show_hist(self._dataset, ax=ax, **kwargs)
 
         else:
             d0 = len(self.__arr_dict)
@@ -914,6 +925,6 @@ def histogram(self, ax=None, **kwargs):
                 i += 1
 
             data = np.ma.masked_where(data == self.nodatavals, data)
-            ax = self._rasterio.plot.show_hist(data, ax=ax, **kwargs)
+            ax = show_hist(data, ax=ax, **kwargs)
 
         return ax
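
The rasters.py hunks above drop the cached ``self._rasterio`` handle in favor
of importing whatever each method needs at call time. A minimal sketch of the
same pattern as a standalone function (``show_raster`` is hypothetical, not
FloPy API):

    from flopy.utils import import_optional_dependency

    def show_raster(dataset, ax=None, **kwargs):
        # the optional backend is imported only when plotting is requested,
        # so a missing rasterio raises an informative ImportError here rather
        # than at module import time
        import_optional_dependency("rasterio")
        from rasterio.plot import show

        return show(dataset, ax=ax, contour=False, **kwargs)
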
diff --git a/flopy/utils/sfroutputfile.py b/flopy/utils/sfroutputfile.py
index 1cb50b2f3e..861913e786 100644
--- a/flopy/utils/sfroutputfile.py
+++ b/flopy/utils/sfroutputfile.py
@@ -1,4 +1,5 @@
 import numpy as np
+from ..utils import import_optional_dependency
 
 
 class SfrFile:
@@ -49,14 +50,8 @@ def __init__(self, filename, geometries=None, verbose=False):
         """
         Class constructor.
         """
-        try:
-            import pandas as pd
 
-            self.pd = pd
-        except ImportError:
-            print("This method requires pandas")
-            self.pd = None
-            return
+        self.pd = import_optional_dependency("pandas")
 
         # get the number of rows to skip at top, and the number of data columns
         self.filename = filename
diff --git a/flopy/utils/triangle.py b/flopy/utils/triangle.py
index 181d5533a6..17fc9368a7 100644
--- a/flopy/utils/triangle.py
+++ b/flopy/utils/triangle.py
@@ -1,5 +1,6 @@
 import os
 import numpy as np
+import matplotlib.pyplot as plt
 import subprocess
 from ..mbase import which
 from ..utils.cvfdutil import centroid_of_polygon
@@ -307,12 +308,6 @@ def plot_boundary(self, ibm, ax=None, **kwargs):
         None
 
         """
-        try:
-            import matplotlib.pyplot as plt
-        except:
-            raise ImportError(
-                "matplotlib must be installed to use triangle.plot_boundary()"
-            )
         if ax is None:
             ax = plt.gca()
         idx = np.where(self.edge["boundary_marker"] == ibm)[0]
@@ -343,12 +338,6 @@ def plot_vertices(self, ax=None, **kwargs):
         None
 
         """
-        try:
-            import matplotlib.pyplot as plt
-        except:
-            raise ImportError(
-                "matplotlib must be installed to use triangle.plot_vertices()"
-            )
         if ax is None:
             ax = plt.gca()
         ax.plot(self.node["x"], self.node["y"], lw=0, **kwargs)
@@ -375,12 +364,6 @@ def label_vertices(self, ax=None, onebased=True, **kwargs):
         None
 
         """
-        try:
-            import matplotlib.pyplot as plt
-        except:
-            raise ImportError(
-                "matplotlib must be installed to use triangle.label_vertices()"
-            )
         if ax is None:
             ax = plt.gca()
         for i in range(self.verts.shape[0]):
@@ -409,13 +392,6 @@ def plot_centroids(self, ax=None, **kwargs):
         None
 
         """
-        try:
-            import matplotlib.pyplot as plt
-        except:
-            raise ImportError(
-                "matplotlib must be installed to use triangle.plot_centroids()"
-            )
-
         if ax is None:
             ax = plt.gca()
         xcyc = self.get_xcyc()
@@ -443,12 +419,6 @@ def label_cells(self, ax=None, onebased=True, **kwargs):
         None
 
         """
-        try:
-            import matplotlib.pyplot as plt
-        except:
-            raise ImportError(
-                "matplotlib must be installed to use triangle.lavel_cells()"
-            )
         if ax is None:
             ax = plt.gca()
         xcyc = self.get_xcyc()
diff --git a/flopy/utils/util_list.py b/flopy/utils/util_list.py
index 209afc9f79..8e07823817 100644
--- a/flopy/utils/util_list.py
+++ b/flopy/utils/util_list.py
@@ -12,6 +12,7 @@
 import numpy as np
 from ..datbase import DataInterface, DataListInterface, DataType
 from ..utils.recarray_utils import create_empty_recarray
+from ..utils import import_optional_dependency
 
 
 class MfList(DataInterface, DataListInterface):
@@ -441,11 +442,8 @@ def get_dataframe(self, squeeze=False):
         Requires pandas.
 
         """
-        try:
-            import pandas as pd
-        except Exception as e:
-            msg = "MfList.get_dataframe() requires pandas"
-            raise ImportError(msg)
+        extra = "MfList.get_dataframe() requires pandas."
+        pd = import_optional_dependency("pandas", extra=extra)
 
         # make a dataframe of all data for all stress periods
         names = ["per", "k", "i", "j"]
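
The ``extra`` keyword shown above lets each call site prepend context to the
helper's error. A hedged usage sketch; the base message wording comes from the
vendored pandas helper and is not quoted verbatim here:

    from flopy.utils import import_optional_dependency

    try:
        pd = import_optional_dependency(
            "pandas", extra="MfList.get_dataframe() requires pandas."
        )
    except ImportError as err:
        # with pandas missing, the raised message names the package and
        # includes the extra text supplied above
        print(err)
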
diff --git a/flopy/utils/utl_import.py b/flopy/utils/utl_import.py
index 69da4885dc..b1f5c65836 100644
--- a/flopy/utils/utl_import.py
+++ b/flopy/utils/utl_import.py
@@ -5,9 +5,12 @@
 import types
 import warnings
 
+from .parse_version import Version
+
 # Update install.rst when updating versions!
 
 VERSIONS = {
+    "shapefile": "2.0.0",
     # "bs4": "4.8.2",
     # "bottleneck": "1.3.1",
     # "fsspec": "0.7.4",
@@ -39,6 +42,7 @@
 
 INSTALL_MAPPING = {
     "shapefile": "pyshp",
+    "dateutil": "python-dateutil",
 }
 
 
diff --git a/flopy/utils/voronoi.py b/flopy/utils/voronoi.py
index 48376bc873..64f5512a53 100644
--- a/flopy/utils/voronoi.py
+++ b/flopy/utils/voronoi.py
@@ -346,7 +346,7 @@ def plot(self, ax=None, plot_title=True, **kwargs):
             axes that contains the voronoi model grid
 
         """
-        plt = import_optional_dependency("matplotlib.pyplot")
+        import matplotlib.pyplot as plt
 
         if ax is None:
             ax = plt.subplot(1, 1, 1, aspect="equal")
diff --git a/flopy/utils/zonbud.py b/flopy/utils/zonbud.py
index 16d1853829..336ac873cf 100644
--- a/flopy/utils/zonbud.py
+++ b/flopy/utils/zonbud.py
@@ -3,6 +3,7 @@
 import numpy as np
 from itertools import groupby
 from .utils_def import totim_to_datetime
+from . import import_optional_dependency
 
 
 class ZoneBudget:
@@ -2368,11 +2369,8 @@ def _recarray_to_dataframe(
 
     pd.DataFrame
     """
-    try:
-        import pandas as pd
-    except Exception as e:
-        msg = f"ZoneBudget.get_dataframes() error import pandas: {e!s}"
-        raise ImportError(msg)
+    extra = "ZoneBudget.get_dataframes() requires pandas."
+    pd = import_optional_dependency("pandas", extra=extra)
 
     valid_index_keys = ["totim", "kstpkper"]
     s = f'index_key "{index_key}" is not valid.'
@@ -2993,7 +2991,8 @@ def _volumetric_flux(recarray, modeltime, extrapolate_kper=False):
         pd.DataFrame
 
     """
-    import pandas as pd
+    extra = "ZoneBudget._volumetric_flux() requires pandas."
+    pd = import_optional_dependency("pandas", extra=extra)
 
     nper = len(modeltime.nstp)
     volumetric_data = {}
diff --git a/setup.py b/setup.py
index 22b3ff139e..79c5c28fa0 100644
--- a/setup.py
+++ b/setup.py
@@ -30,6 +30,7 @@
     platforms="Windows, Mac OS-X, Linux",
     install_requires=[
         "numpy >=1.15",
+        "matplotlib",
     ],
     packages=[
         "flopy",

From e3dbd0eb28155c361202407c2dfa3f930ed6ac75 Mon Sep 17 00:00:00 2001
From: Joseph D Hughes 
Date: Mon, 11 Oct 2021 14:22:26 -0500
Subject: [PATCH 03/15] refactor(imports): add function to import optional
 packages

---
 flopy/utils/gridintersect.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/flopy/utils/gridintersect.py b/flopy/utils/gridintersect.py
index f1ff8813e9..2a2aa44d63 100644
--- a/flopy/utils/gridintersect.py
+++ b/flopy/utils/gridintersect.py
@@ -32,11 +32,12 @@
     SHAPELY_GE_20 = False
     SHAPELY_LT_18 = False
 
+shapely_warning = None
 if shapely is not None:
     try:
         from shapely.errors import ShapelyDeprecationWarning as shapely_warning
     except ImportError:
-        shapely_warning = None
+        pass
 
 if shapely_warning is not None and not SHAPELY_GE_20:
 

From 55d3995801893727ad744195d7fa6dc022a7051b Mon Sep 17 00:00:00 2001
From: Joseph D Hughes 
Date: Mon, 11 Oct 2021 14:36:45 -0500
Subject: [PATCH 04/15] refactor(imports): add function to import optional
 packages

---
 flopy/utils/geospatial_utils.py | 8 ++++----
 flopy/utils/gridintersect.py    | 2 +-
 flopy/utils/utl_import.py       | 7 +++++--
 3 files changed, 10 insertions(+), 7 deletions(-)

diff --git a/flopy/utils/geospatial_utils.py b/flopy/utils/geospatial_utils.py
index d6ff9efec0..af7e107796 100644
--- a/flopy/utils/geospatial_utils.py
+++ b/flopy/utils/geospatial_utils.py
@@ -3,7 +3,7 @@
 from ..utils.geometry import Shape, Collection
 from ..utils import import_optional_dependency
 
-shapely = import_optional_dependency("shapely", errors="ignore")
+shapely = import_optional_dependency("shapely", errors="silent")
 if shapely is not None:
     from shapely.geometry import (
         MultiPolygon,
@@ -14,7 +14,7 @@
         MultiLineString,
     )
 
-geojson = import_optional_dependency("geojson", errors="ignore")
+geojson = import_optional_dependency("geojson", errors="silent")
 geojson_classes = {}
 if geojson is not None:
     geojson_classes = {
@@ -58,7 +58,7 @@ class GeoSpatialUtil:
 
     def __init__(self, obj, shapetype=None):
         self.__shapefile = import_optional_dependency(
-            "shapefile", errors="ignore"
+            "shapefile", errors="silent"
         )
         self.__obj = obj
         self.__geo_interface = {}
@@ -262,7 +262,7 @@ class GeoSpatialCollection:
     def __init__(self, obj, shapetype=None):
 
         self.__shapefile = import_optional_dependency(
-            "shapefile", errors="ignore"
+            "shapefile", errors="silent"
         )
         self.__obj = obj
         self.__collection = []
diff --git a/flopy/utils/gridintersect.py b/flopy/utils/gridintersect.py
index 2a2aa44d63..c66a592d68 100644
--- a/flopy/utils/gridintersect.py
+++ b/flopy/utils/gridintersect.py
@@ -5,7 +5,7 @@
 from .geometry import transform
 from .geospatial_utils import GeoSpatialUtil
 
-shapely = import_optional_dependency("shapely", errors="ignore")
+shapely = import_optional_dependency("shapely", errors="silent")
 if shapely is not None:
     from shapely.geometry import (
         MultiPoint,
diff --git a/flopy/utils/utl_import.py b/flopy/utils/utl_import.py
index b1f5c65836..b62b38fa13 100644
--- a/flopy/utils/utl_import.py
+++ b/flopy/utils/utl_import.py
@@ -86,6 +86,8 @@ def import_optional_dependency(
           return the module, even if the version is too old.
           It's expected that users validate the version locally when
           using ``errors="ignore"`` (see. ``io/html.py``)
+        * silent: Same as "ignore" except the warning message is not written
+          to the screen.
     min_version : str, default None
         Specify a minimum version that is different from the global pandas
         minimum version required.
@@ -98,7 +100,7 @@ def import_optional_dependency(
         is ``'warn'``.
     """
 
-    assert errors in {"warn", "raise", "ignore"}
+    assert errors in {"warn", "raise", "ignore", "silent"}
 
     package_name = INSTALL_MAPPING.get(name)
     install_name = package_name if package_name is not None else name
@@ -130,7 +132,8 @@ def import_optional_dependency(
         version = get_version(module_to_get)
         if Version(version) < Version(minimum_version):
             msg = (
-                f"Pandas requires version '{minimum_version}' or newer of '{parent}' "
+                f"FloPy requires version '{minimum_version}' "
+                f"or newer of '{parent}' "
                 f"(version '{version}' currently installed)."
             )
             if errors == "warn":
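
With "silent" added, the helper supports four failure modes. A hedged sketch of
how call sites choose between them, using only behavior visible in this series
("raise" always raises; "silent" returns None without printing, with the print
suppression itself landing in PATCH 05 below):

    from flopy.utils import import_optional_dependency

    # hard requirement at call time: an informative ImportError when missing
    geojson = import_optional_dependency("geojson", errors="raise")

    # quiet feature probe: module import never fails and nothing is printed
    shapely = import_optional_dependency("shapely", errors="silent")
    if shapely is not None:
        from shapely.geometry import Point
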

From 31105234a6b17766d4be701cd8a223692ab028da Mon Sep 17 00:00:00 2001
From: Joseph D Hughes 
Date: Mon, 11 Oct 2021 14:39:01 -0500
Subject: [PATCH 05/15] refactor(imports): add function to import optional
 packages

---
 flopy/utils/utl_import.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/flopy/utils/utl_import.py b/flopy/utils/utl_import.py
index b62b38fa13..5ef1e069a8 100644
--- a/flopy/utils/utl_import.py
+++ b/flopy/utils/utl_import.py
@@ -115,7 +115,8 @@ def import_optional_dependency(
         if errors == "raise":
             raise ImportError(msg)
         else:
-            print(msg)
+            if errors != "silent":
+                print(msg)
             return None
 
     # Handle submodules: if we have submodule, grab parent module from sys.modules

From 98367553d82bebf56dc868451b7a782cd302dfb0 Mon Sep 17 00:00:00 2001
From: Joseph D Hughes 
Date: Mon, 11 Oct 2021 14:46:18 -0500
Subject: [PATCH 06/15] refactor(imports): add function to import optional
 packages

---
 flopy/utils/flopy_io.py        | 1 +
 flopy/utils/mtlistfile.py      | 1 +
 flopy/utils/observationfile.py | 1 +
 3 files changed, 3 insertions(+)

diff --git a/flopy/utils/flopy_io.py b/flopy/utils/flopy_io.py
index 0fefeec450..c263a01722 100755
--- a/flopy/utils/flopy_io.py
+++ b/flopy/utils/flopy_io.py
@@ -6,6 +6,7 @@
 import numpy as np
 from ..utils import import_optional_dependency
 
+
 def _fmt_string(array, float_format="{}"):
     """
     makes a formatting string for a rec-array;
diff --git a/flopy/utils/mtlistfile.py b/flopy/utils/mtlistfile.py
index 1d1232ccfc..a0d67e31aa 100644
--- a/flopy/utils/mtlistfile.py
+++ b/flopy/utils/mtlistfile.py
@@ -8,6 +8,7 @@
 
 from ..utils import import_optional_dependency
 
+
 class MtListBudget:
     """
     MT3D mass budget reader
diff --git a/flopy/utils/observationfile.py b/flopy/utils/observationfile.py
index 5c9081f8ff..73c4cdf5df 100644
--- a/flopy/utils/observationfile.py
+++ b/flopy/utils/observationfile.py
@@ -176,6 +176,7 @@ def get_dataframe(
         """
 
         from ..utils.utils_def import totim_to_datetime
+
         extra = "ObsFiles.get_dataframe() requires pandas."
         pd = import_optional_dependency("pandas", extra=extra)
 

From a167870846cfb13668ac5da6edd0f69f6a0eb4b7 Mon Sep 17 00:00:00 2001
From: Joseph D Hughes 
Date: Mon, 11 Oct 2021 15:13:24 -0500
Subject: [PATCH 07/15] refactor(imports): add function to import optional
 packages

---
 flopy/discretization/structuredgrid.py   |  4 ----
 flopy/discretization/unstructuredgrid.py |  5 +----
 flopy/discretization/vertexgrid.py       | 17 ++-------------
 flopy/export/utils.py                    | 27 +++++++++---------------
 4 files changed, 13 insertions(+), 40 deletions(-)

diff --git a/flopy/discretization/structuredgrid.py b/flopy/discretization/structuredgrid.py
index 8d300a273d..0fe6309614 100644
--- a/flopy/discretization/structuredgrid.py
+++ b/flopy/discretization/structuredgrid.py
@@ -725,10 +725,6 @@ def map_polygons(self):
         -------
             list of Polygon objects
         """
-        try:
-            import matplotlib.path as mpath
-        except ImportError:
-            raise ImportError("matplotlib required to use this method")
         cache_index = "xyzgrid"
         if (
             cache_index not in self._cache_dict
diff --git a/flopy/discretization/unstructuredgrid.py b/flopy/discretization/unstructuredgrid.py
index 1955a8840f..05983f3ede 100644
--- a/flopy/discretization/unstructuredgrid.py
+++ b/flopy/discretization/unstructuredgrid.py
@@ -471,10 +471,7 @@ def map_polygons(self):
         -------
             list or dict of matplotlib.collections.Polygon
         """
-        try:
-            from matplotlib.path import Path
-        except ImportError:
-            raise ImportError("matplotlib required to use this method")
+        from matplotlib.path import Path
 
         cache_index = "xyzgrid"
         if (
diff --git a/flopy/discretization/vertexgrid.py b/flopy/discretization/vertexgrid.py
index 40e8555531..2a57833660 100644
--- a/flopy/discretization/vertexgrid.py
+++ b/flopy/discretization/vertexgrid.py
@@ -2,10 +2,7 @@
 import copy
 import numpy as np
 
-try:
-    from matplotlib.path import Path
-except (ImportError, RuntimeError):
-    Path = None
+from matplotlib.path import Path
 
 from .grid import Grid, CachedData
 from ..utils.geometry import is_clockwise
@@ -226,10 +223,6 @@ def map_polygons(self):
         -------
             list of Polygon objects
         """
-        try:
-            import matplotlib.path as mpath
-        except ImportError:
-            raise ImportError("matplotlib required to use this method")
         cache_index = "xyzgrid"
         if (
             cache_index not in self._cache_dict
@@ -239,7 +232,7 @@ def map_polygons(self):
             self._polygons = None
         if self._polygons is None:
             self._polygons = [
-                mpath.Path(self.get_cell_vertices(nn))
+                Path(self.get_cell_vertices(nn))
                 for nn in range(self.ncpl)
             ]
 
@@ -270,12 +263,6 @@ def intersect(self, x, y, local=False, forgive=False):
             The CELL2D number
 
         """
-        if Path is None:
-            s = (
-                "Could not import matplotlib.  Must install matplotlib "
-                "in order to use VertexGrid.intersect() method"
-            )
-            raise ImportError(s)
 
         if local:
             # transform x and y to real-world coordinates
diff --git a/flopy/export/utils.py b/flopy/export/utils.py
index bf610082ef..ed5869f3f2 100644
--- a/flopy/export/utils.py
+++ b/flopy/export/utils.py
@@ -1511,23 +1511,16 @@ def export_array(
         a = a.copy()
         a[np.isnan(a)] = nodata
         if modelgrid.angrot != 0:
-            ndimage = import_optional_dependency("scipy.ndimage")
-            from ndimage import rotate
-
-            try:
-                from scipy.ndimage import rotate
-            except ImportError:
-                rotate = None
-                print("scipy package required to export rotated grid.")
-
-            if rotate is not None:
-                a = rotate(a, modelgrid.angrot, cval=nodata)
-                height_rot, width_rot = a.shape
-                xmin, xmax, ymin, ymax = modelgrid.extent
-                dx = (xmax - xmin) / width_rot
-                dy = (ymax - ymin) / height_rot
-                cellsize = np.max((dx, dy))
-                xoffset, yoffset = xmin, ymin
+            extra = "exporting rotated grids requires SciPy."
+            ndimage = import_optional_dependency("scipy.ndimage", extra=extra)
+
+            a = ndimage.rotate(a, modelgrid.angrot, cval=nodata)
+            height_rot, width_rot = a.shape
+            xmin, xmax, ymin, ymax = modelgrid.extent
+            dx = (xmax - xmin) / width_rot
+            dy = (ymax - ymin) / height_rot
+            cellsize = np.max((dx, dy))
+            xoffset, yoffset = xmin, ymin
 
         filename = (
             ".".join(filename.split(".")[:-1]) + ".asc"

From 5c8b069d4791185f930317b1d9fa369f38c81b58 Mon Sep 17 00:00:00 2001
From: Joseph D Hughes 
Date: Mon, 11 Oct 2021 15:15:50 -0500
Subject: [PATCH 08/15] refactor(imports): add function to import optional
 packages

---
 flopy/discretization/vertexgrid.py | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/flopy/discretization/vertexgrid.py b/flopy/discretization/vertexgrid.py
index 2a57833660..207bc972ed 100644
--- a/flopy/discretization/vertexgrid.py
+++ b/flopy/discretization/vertexgrid.py
@@ -232,8 +232,7 @@ def map_polygons(self):
             self._polygons = None
         if self._polygons is None:
             self._polygons = [
-                Path(self.get_cell_vertices(nn))
-                for nn in range(self.ncpl)
+                Path(self.get_cell_vertices(nn)) for nn in range(self.ncpl)
             ]
 
         return copy.copy(self._polygons)

From 4dee6aa44a95c6ba61130abc652f0aa1db5defde Mon Sep 17 00:00:00 2001
From: Joseph D Hughes 
Date: Mon, 11 Oct 2021 17:40:10 -0500
Subject: [PATCH 09/15] refactor(imports): add function to import optional
 packages

---
 autotest/t069_test_vtkexportmodel.py |   2 +-
 autotest/t075_test_ugrid.py          |  16 ++--
 autotest/t420_test.py                |  20 +++--
 flopy/export/shapefile_utils.py      |  10 +--
 flopy/export/utils.py                |  21 ++---
 flopy/plot/crosssection.py           |  14 +--
 flopy/plot/map.py                    |  12 +--
 flopy/plot/plotutil.py               |   9 +-
 flopy/plot/styles.py                 |   3 -
 flopy/utils/geospatial_utils.py      |  97 +++++++++------------
 flopy/utils/gridintersect.py         | 122 ++++++++++++++++-----------
 flopy/utils/voronoi.py               |   9 +-
 12 files changed, 154 insertions(+), 181 deletions(-)

diff --git a/autotest/t069_test_vtkexportmodel.py b/autotest/t069_test_vtkexportmodel.py
index d9950f328b..46359f3d4d 100644
--- a/autotest/t069_test_vtkexportmodel.py
+++ b/autotest/t069_test_vtkexportmodel.py
@@ -1,5 +1,5 @@
 """
-Test vtk export_model function without packages_names definition 
+Test vtk export_model function without packages_names definition
 """
 
 import os
diff --git a/autotest/t075_test_ugrid.py b/autotest/t075_test_ugrid.py
index 55f96c9a8a..4a9e199d22 100644
--- a/autotest/t075_test_ugrid.py
+++ b/autotest/t075_test_ugrid.py
@@ -225,7 +225,11 @@ def test_triangle_unstructured_grid():
     xc, yc = tri.get_xcyc().T
     ncpl = np.array([len(iverts)])
     g = UnstructuredGrid(
-        vertices=verts, iverts=iverts, ncpl=ncpl, xcenters=xc, ycenters=yc,
+        vertices=verts,
+        iverts=iverts,
+        ncpl=ncpl,
+        xcenters=xc,
+        ycenters=yc,
     )
     assert len(g.grid_lines) == 8190
     assert g.nnodes == g.ncpl == 2730
@@ -293,7 +297,7 @@ def test_voronoi_grid0(plot=False):
     gridprops = vor.get_gridprops_vertexgrid()
     ncpl = gridprops["ncpl"]
     assert (
-            ncpl == answer_ncpl
+        ncpl == answer_ncpl
     ), f"Number of cells should be {answer_ncpl}. Found {ncpl}"
 
     voronoi_grid = VertexGrid(**gridprops, nlay=1)
@@ -329,7 +333,7 @@ def test_voronoi_grid1(plot=False):
     voronoi_grid = VertexGrid(**gridprops, nlay=1)
     ncpl = gridprops["ncpl"]
     assert (
-            ncpl == answer_ncpl
+        ncpl == answer_ncpl
     ), f"Number of cells should be {answer_ncpl}. Found {ncpl}"
 
     if plot:
@@ -362,7 +366,7 @@ def test_voronoi_grid2(plot=False):
     voronoi_grid = VertexGrid(**gridprops, nlay=1)
     ncpl = gridprops["ncpl"]
     assert (
-            ncpl == answer_ncpl
+        ncpl == answer_ncpl
     ), f"Number of cells should be {answer_ncpl}. Found {ncpl}"
 
     if plot:
@@ -405,7 +409,7 @@ def test_voronoi_grid3(plot=False):
     voronoi_grid = VertexGrid(**gridprops, nlay=1)
     ncpl = gridprops["ncpl"]
     assert (
-            ncpl == answer_ncpl
+        ncpl == answer_ncpl
     ), f"Number of cells should be {answer_ncpl}. Found {ncpl}"
 
     if plot:
@@ -441,7 +445,7 @@ def test_voronoi_grid4(plot=False):
     voronoi_grid = VertexGrid(**gridprops, nlay=1)
     ncpl = gridprops["ncpl"]
     assert (
-            ncpl == answer_ncpl
+        ncpl == answer_ncpl
     ), f"Number of cells should be {answer_ncpl}. Found {ncpl}"
 
     if plot:
diff --git a/autotest/t420_test.py b/autotest/t420_test.py
index 83ee82f1a9..bfb26f5cf5 100644
--- a/autotest/t420_test.py
+++ b/autotest/t420_test.py
@@ -49,6 +49,7 @@
 if not os.path.exists(gridgen_ws):
     os.makedirs(gridgen_ws)
 
+
 def test_mfusg():
 
     name = "dummy"
@@ -123,18 +124,23 @@ def test_mfusg():
 
         # test if single node idx works
         one_hds = flopy.utils.HeadUFile(head_file).get_ts(idx=300)
-        if one_hds[0,1] != head[0][300]:
-            raise AssertionError("Error head from 'get_ts' != head from 'get_data'")
+        if one_hds[0, 1] != head[0][300]:
+            raise AssertionError(
+                "Error head from 'get_ts' != head from 'get_data'"
+            )
 
         # test if list of nodes for idx works
-        nodes = [300,182,65]
+        nodes = [300, 182, 65]
 
         multi_hds = flopy.utils.HeadUFile(head_file).get_ts(idx=nodes)
         for i, node in enumerate(nodes):
-            if multi_hds[0, i+1] != head[0][node]:
-                raise AssertionError("Error head from 'get_ts' != head from 'get_data'")
+            if multi_hds[0, i + 1] != head[0][node]:
+                raise AssertionError(
+                    "Error head from 'get_ts' != head from 'get_data'"
+                )
 
     return
 
-if __name__ == '__main__':
-    test_mfusg()
\ No newline at end of file
+
+if __name__ == "__main__":
+    test_mfusg()
diff --git a/flopy/export/shapefile_utils.py b/flopy/export/shapefile_utils.py
index 21ee609147..9ded65c505 100755
--- a/flopy/export/shapefile_utils.py
+++ b/flopy/export/shapefile_utils.py
@@ -4,7 +4,6 @@
 """
 import copy
 import shutil
-import inspect
 import json
 import numpy as np
 import os
@@ -953,12 +952,9 @@ class EpsgReference:
     """
 
     def __init__(self):
-        try:
-            from appdirs import user_data_dir
-        except ImportError:
-            user_data_dir = None
-        if user_data_dir:
-            datadir = user_data_dir("flopy")
+        appdirs = import_optional_dependency("appdirs", errors="silent")
+        if appdirs is not None:
+            datadir = appdirs.user_data_dir("flopy")
         else:
             # if appdirs is not installed, use user's home directory
             datadir = os.path.join(os.path.expanduser("~"), ".flopy")
diff --git a/flopy/export/utils.py b/flopy/export/utils.py
index ed5869f3f2..384f96ff87 100644
--- a/flopy/export/utils.py
+++ b/flopy/export/utils.py
@@ -1548,21 +1548,17 @@ def export_array(
             or modelgrid.delr[0] != modelgrid.delc[0]
         ):
             raise ValueError("GeoTIFF export require a uniform grid.")
-        try:
-            import rasterio
-            from rasterio import Affine
-        except ImportError:
-            print("GeoTIFF export requires the rasterio package.")
-            return
+        extra = "GeoTIFF export requires the rasterio."
+        rasterio = import_optional_dependency("rasterio", extra=extra)
         dxdy = modelgrid.delc[0]
         # because this is only implemented for a structured grid,
         # we can get the xul and yul coordinate from modelgrid.xvertices(0, 0)
         verts = modelgrid.get_cell_vertices(0, 0)
         xul, yul = verts[0]
         trans = (
-            Affine.translation(xul, yul)
-            * Affine.rotation(modelgrid.angrot)
-            * Affine.scale(dxdy, -dxdy)
+            rasterio.Affine.translation(xul, yul)
+            * rasterio.Affine.rotation(modelgrid.angrot)
+            * rasterio.Affine.scale(dxdy, -dxdy)
         )
 
         # third dimension is the number of bands
@@ -1713,10 +1709,9 @@ def export_contourf(
 
     """
 
-    try:
-        from shapely import geometry
-    except ImportError:
-        raise ImportError("export_contourf requires python shapely package")
+    extra = "export_contourf requires shapely."
+    shapely = import_optional_dependency("shapely", extra=extra)
+    from shapely import geometry
 
     from ..utils.geometry import Polygon
     from .shapefile_utils import recarray2shp
diff --git a/flopy/plot/crosssection.py b/flopy/plot/crosssection.py
index 2c8cea4faf..8203d2474e 100644
--- a/flopy/plot/crosssection.py
+++ b/flopy/plot/crosssection.py
@@ -5,7 +5,7 @@
 from matplotlib.patches import Polygon
 
 from . import plotutil
-from ..utils import geometry, import_optional_dependency
+from ..utils import geometry
 
 import copy
 import warnings
@@ -54,12 +54,6 @@ def __init__(
 
         self.ax = ax
         self.geographic_coords = geographic_coords
-        if plt is None:
-            raise ImportError(
-                "Could not import matplotlib.  Must install matplotlib "
-                "in order to use ModelCrossSection method"
-            )
-
         self.model = model
 
         if modelgrid is not None:
@@ -502,11 +496,7 @@ def contour_array(self, a, masked_values=None, head=None, **kwargs):
         contour_set : matplotlib.pyplot.contour
 
         """
-        if plt is None:
-            err_msg = "matplotlib must be installed to use contour_array()"
-            raise ImportError(err_msg)
-        else:
-            import matplotlib.tri as tri
+        import matplotlib.tri as tri
 
         if not isinstance(a, np.ndarray):
             a = np.array(a)
diff --git a/flopy/plot/map.py b/flopy/plot/map.py
index eeb7f6e3fe..f3cd4c4de4 100644
--- a/flopy/plot/map.py
+++ b/flopy/plot/map.py
@@ -43,12 +43,6 @@ def __init__(
         self, model=None, modelgrid=None, ax=None, layer=0, extent=None
     ):
 
-        if plt is None:
-            raise ImportError(
-                "Could not import matplotlib.  Must install matplotlib "
-                "in order to use ModelMap method"
-            )
-
         self.model = model
         self.layer = layer
         self.mg = None
@@ -169,11 +163,7 @@ def contour_array(self, a, masked_values=None, **kwargs):
         contour_set : matplotlib.pyplot.contour
 
         """
-        try:
-            import matplotlib.tri as tri
-        except ImportError:
-            err_msg = "matplotlib must be installed to use contour_array()"
-            raise ImportError(err_msg)
+        import matplotlib.tri as tri
 
         a = np.copy(a)
         if not isinstance(a, np.ndarray):
diff --git a/flopy/plot/plotutil.py b/flopy/plot/plotutil.py
index 1f8b721db2..542a079371 100644
--- a/flopy/plot/plotutil.py
+++ b/flopy/plot/plotutil.py
@@ -2277,13 +2277,8 @@ def cvfd_to_patch_collection(verts, iverts):
         DeprecationWarning,
     )
 
-    if plt is None:
-        raise ImportError(
-            "matplotlib must be installed to use cvfd_to_patch_collection()"
-        )
-    else:
-        from matplotlib.patches import Polygon
-        from matplotlib.collections import PatchCollection
+    from matplotlib.patches import Polygon
+    from matplotlib.collections import PatchCollection
 
     ptchs = []
     for ivertlist in iverts:
diff --git a/flopy/plot/styles.py b/flopy/plot/styles.py
index 4480a85c5d..96272cd58d 100644
--- a/flopy/plot/styles.py
+++ b/flopy/plot/styles.py
@@ -49,7 +49,6 @@ def set_font_type(cls, family, fontname):
         -------
             None
         """
-        mpl = import_optional_dependency("matplotlib")
         mpl.rcParams["font.family"] = family
         mpl.rcParams[f"font.{family}"] = fontname
         return mpl.rcParams
@@ -432,8 +431,6 @@ def __set_fontspec(cls, bold=True, italic=True, fontsize=9, family=False):
         -------
             dict
         """
-        mpl = import_optional_dependency("matplotlib")
-
         family = mpl.rcParams["font.family"][0]
         font = mpl.rcParams[f"font.{family}"][0]
 
diff --git a/flopy/utils/geospatial_utils.py b/flopy/utils/geospatial_utils.py
index af7e107796..9c34ecf4fc 100644
--- a/flopy/utils/geospatial_utils.py
+++ b/flopy/utils/geospatial_utils.py
@@ -117,21 +117,19 @@ def __init__(self, obj, shapetype=None):
                     "coordinates": obj.coordinates,
                 }
 
-        if shapely is not None:
-            if isinstance(
-                obj,
-                (
-                    Point,
-                    MultiPoint,
-                    Polygon,
-                    MultiPolygon,
-                    LineString,
-                    MultiLineString,
-                ),
-            ):
-                self.__geo_interface = obj.__geo_interface__
-        else:
-            raise ModuleNotFoundError("shapely is not installed")
+        shapely_geo = import_optional_dependency("shapely.geometry")
+        if isinstance(
+            obj,
+            (
+                shapely_geo.Point,
+                shapely_geo.MultiPoint,
+                shapely_geo.Polygon,
+                shapely_geo.MultiPolygon,
+                shapely_geo.LineString,
+                shapely_geo.MultiLineString,
+            ),
+        ):
+            self.__geo_interface = obj.__geo_interface__
 
     @property
     def __geo_interface__(self):
@@ -179,12 +177,9 @@ def shapely(self):
         -------
             shapely.geometry.
         """
-        if shapely is not None:
-            if self._shapely is None:
-                self._shapely = shapely.geometry.shape(self.__geo_interface)
-            return self._shapely
-        else:
-            raise ModuleNotFoundError("shapely is not installed")
+        shapely_geo = import_optional_dependency("shapely.geometry")
+        self._shapely = shapely_geo.shape(self.__geo_interface)
+        return self._shapely
 
     @property
     def geojson(self):
@@ -195,13 +190,10 @@ def geojson(self):
         -------
             geojson.
         """
-        if geojson is not None:
-            if self._geojson is None:
-                cls = geojson_classes[self.__geo_interface["type"].lower()]
-                self._geojson = cls(self.__geo_interface["coordinates"])
-            return self._geojson
-        else:
-            raise ModuleNotFoundError("geojson is not installed")
+        import_optional_dependency("geojson")
+        cls = geojson_classes[self.__geo_interface["type"].lower()]
+        self._geojson = cls(self.__geo_interface["coordinates"])
+        return self._geojson
 
     @property
     def shape(self):
@@ -322,20 +314,18 @@ def __init__(self, obj, shapetype=None):
                 for geom in obj.geometries:
                     self.__collection.append(GeoSpatialUtil(geom))
 
-        if shapely is not None:
-            if isinstance(
-                obj,
-                (
-                    shapely.geometry.collection.GeometryCollection,
-                    MultiPoint,
-                    MultiLineString,
-                    MultiPolygon,
-                ),
-            ):
-                for geom in obj.geoms:
-                    self.__collection.append(GeoSpatialUtil(geom))
-        else:
-            raise ModuleNotFoundError("shapely is no installed")
+        shapely_loc = import_optional_dependency("shapely.geometry")
+        if isinstance(
+            obj,
+            (
+                shapely_loc.collection.GeometryCollection,
+                shapely_loc.MultiPoint,
+                shapely_loc.MultiLineString,
+                shapely_loc.MultiPolygon,
+            ),
+        ):
+            for geom in obj.geoms:
+                self.__collection.append(GeoSpatialUtil(geom))
 
     def __iter__(self):
         """
@@ -384,14 +374,10 @@ def shapely(self):
         -------
             shapely.geometry.collection.GeometryCollection object
         """
-        if shapely is not None:
-            if self._shapely is None:
-                self._shapely = shapely.geometry.collection.GeometryCollection(
-                    [i.shapely for i in self.__collection]
-                )
-        else:
-            raise ModuleNotFoundError("shapely is not installed")
-
+        shapely_loc = import_optional_dependency("shapely.geometry")
+        self._shapely = shapely_loc.collection.GeometryCollection(
+            [i.shapely for i in self.__collection]
+        )
         return self._shapely
 
     @property
@@ -403,13 +389,10 @@ def geojson(self):
         -------
             geojson.GeometryCollection
         """
-        if geojson is not None:
-            if self._geojson is None:
-                self._geojson = geojson.GeometryCollection(
-                    [i.geojson for i in self.__collection]
-                )
-        else:
-            raise ModuleNotFoundError("geojson is not installed")
+        geojson_loc = import_optional_dependency("geojson")
+        self._geojson = geojson_loc.GeometryCollection(
+            [i.geojson for i in self.__collection]
+        )
         return self._geojson
 
     @property
diff --git a/flopy/utils/gridintersect.py b/flopy/utils/gridintersect.py
index c66a592d68..413cab42a2 100644
--- a/flopy/utils/gridintersect.py
+++ b/flopy/utils/gridintersect.py
@@ -1,30 +1,16 @@
 import numpy as np
+import contextlib
+import warnings
+from distutils.version import LooseVersion
 
 from .utl_import import import_optional_dependency
 
 from .geometry import transform
 from .geospatial_utils import GeoSpatialUtil
 
-shapely = import_optional_dependency("shapely", errors="silent")
-if shapely is not None:
-    from shapely.geometry import (
-        MultiPoint,
-        Point,
-        Polygon,
-        box,
-        GeometryCollection,
-        MultiPolygon,
-    )
-    from shapely.strtree import STRtree
-    from shapely.affinity import translate, rotate
-    from shapely.prepared import prep
-
-import contextlib
-import warnings
-from distutils.version import LooseVersion
-
 NUMPY_GE_121 = str(np.__version__) >= LooseVersion("1.21")
 
+shapely = import_optional_dependency("shapely", errors="silent")
 if shapely is not None:
     SHAPELY_GE_20 = str(shapely.__version__) >= LooseVersion("2.0")
     SHAPELY_LT_18 = str(shapely.__version__) < LooseVersion("1.8")
@@ -166,7 +152,6 @@ def __init__(self, mfgrid, method=None, rtree=True):
             loop through all model gridcells (which is generally slower).
             Only read when `method='vertex'`.
         """
-        import_optional_dependency("shapely")
 
         self.mfgrid = mfgrid
         if method is None:
@@ -183,7 +168,11 @@ def __init__(self, mfgrid, method=None, rtree=True):
 
             # build STR-tree if specified
             if self.rtree:
-                self.strtree = STRtree(self._get_gridshapes())
+                strtree = import_optional_dependency(
+                    "shapely.strtree",
+                    extra="STRTree requires shapely",
+                )
+                self.strtree = strtree.STRtree(self._get_gridshapes())
 
         elif self.method == "structured" and mfgrid.grid_type == "structured":
             pass
@@ -275,10 +264,12 @@ def _rect_grid_to_shape_generator(self):
         generator :
             generator of shapely Polygons
         """
+        shapely_geo = import_optional_dependency("shapely.geometry")
+
         for i in range(self.mfgrid.nrow):
             for j in range(self.mfgrid.ncol):
                 xy = self.mfgrid.get_cell_vertices(i, j)
-                p = Polygon(xy)
+                p = shapely_geo.Polygon(xy)
                 p.name = (i, j)
                 yield p
 
@@ -302,6 +293,8 @@ def _vtx_grid_to_shape_generator(self):
         generator :
             generator of shapely Polygons
         """
+        shapely_geo = import_optional_dependency("shapely.geometry")
+
         # for cell2d rec-arrays
         if isinstance(self.mfgrid._cell2d, np.recarray):
             for icell in self.mfgrid._cell2d.icell2d:
@@ -320,7 +313,7 @@ def _vtx_grid_to_shape_generator(self):
                 # close the polygon, if necessary
                 if points[0] != points[-1]:
                     points.append(points[0])
-                p = Polygon(points)
+                p = shapely_geo.Polygon(points)
                 p.name = icell
                 yield p
         # for cell2d lists
@@ -337,7 +330,7 @@ def _vtx_grid_to_shape_generator(self):
                 # close the polygon, if necessary
                 if points[0] != points[-1]:
                     points.append(points[0])
-                p = Polygon(points)
+                p = shapely_geo.Polygon(points)
                 p.name = icell
                 yield p
 
@@ -413,7 +406,8 @@ def filter_query_result(qresult, shp):
             filter or generator containing polygons that intersect with shape
         """
         # prepare shape for efficient batch intersection check
-        prepshp = prep(shp)
+        prepared = import_optional_dependency("shapely.prepared")
+        prepshp = prepared.prep(shp)
         # get only gridcells that intersect
         qfiltered = filter(prepshp.intersects, qresult)
         return qfiltered
@@ -461,10 +455,13 @@ def _intersect_point_shapely(self, shp, sort_by_cellid=True):
         numpy.recarray
             a record array containing information about the intersection
         """
+        shapely_geo = import_optional_dependency("shapely.geometry")
+        prepared = import_optional_dependency("shapely.prepared")
+
         # query grid
         qresult = self.query_grid(shp)
         # prepare shape for efficient batch intersection check
-        prepshp = prep(shp)
+        prepshp = prepared.prep(shp)
         # get only gridcells that intersect
         qfiltered = filter(prepshp.intersects, qresult)
 
@@ -501,7 +498,7 @@ def _intersect_point_shapely(self, shp, sort_by_cellid=True):
             if len(cell_shps) > 0:
                 # combine new points in MultiPoint
                 isectshp.append(
-                    MultiPoint(cell_shps)
+                    shapely_geo.MultiPoint(cell_shps)
                     if len(cell_shps) > 1
                     else cell_shps[0]
                 )
@@ -607,6 +604,8 @@ def _intersect_polygon_shapely(self, shp, sort_by_cellid=True):
         numpy.recarray
             a record array containing information about the intersection
         """
+        shapely_geo = import_optional_dependency("shapely.geometry")
+
         # query grid
         qresult = self.query_grid(shp)
         # filter result further if possible (only strtree and filter methods)
@@ -630,7 +629,7 @@ def _intersect_polygon_shapely(self, shp, sort_by_cellid=True):
                 [], intersect, shptyps=["Polygon", "MultiPolygon"]
             )
             if len(collection) > 1:
-                collection = [MultiPolygon(collection)]
+                collection = [shapely_geo.MultiPolygon(collection)]
             # loop over intersection result and store information
             for c in collection:
                 # don't store intersections with 0 area
@@ -696,13 +695,15 @@ def _intersect_point_structured(self, shp):
         numpy.recarray
             a record array containing information about the intersection
         """
+        shapely_geo = import_optional_dependency("shapely.geometry")
+
         nodelist = []
 
         Xe, Ye = self.mfgrid.xyedges
 
-        if isinstance(shp, Point):
+        if isinstance(shp, shapely_geo.Point):
             shp = [shp]
-        elif isinstance(shp, MultiPoint):
+        elif isinstance(shp, shapely_geo.MultiPoint):
             shp = list(shp.geoms)
         else:
             raise ValueError("expected Point or MultiPoint")
@@ -754,7 +755,7 @@ def _intersect_point_structured(self, shp):
                 tempshapes.append(ixs)
             else:
                 # TODO: not sure if this is correct
-                tempshapes[-1] = MultiPoint([tempshapes[-1], ixs])
+                tempshapes[-1] = shapely_geo.MultiPoint([tempshapes[-1], ixs])
 
         ixshapes = tempshapes
         nodelist = tempnodes
@@ -784,6 +785,9 @@ def _intersect_linestring_structured(self, shp, keepzerolengths=False):
         numpy.recarray
             a record array containing information about the intersection
         """
+        shapely_geo = import_optional_dependency("shapely.geometry")
+        affinity_loc = import_optional_dependency("shapely.affinity")
+
         # get local extent of grid
         if (
             self.mfgrid.angrot != 0.0
@@ -796,15 +800,17 @@ def _intersect_linestring_structured(self, shp, keepzerolengths=False):
             ymax = np.max(self.mfgrid.xyedges[1])
         else:
             xmin, xmax, ymin, ymax = self.mfgrid.extent
-        pl = box(xmin, ymin, xmax, ymax)
+        pl = shapely_geo.box(xmin, ymin, xmax, ymax)
 
         # rotate and translate linestring to local coords
         if self.mfgrid.xoffset != 0.0 or self.mfgrid.yoffset != 0.0:
-            shp = translate(
+            shp = affinity_loc.translate(
                 shp, xoff=-self.mfgrid.xoffset, yoff=-self.mfgrid.yoffset
             )
         if self.mfgrid.angrot != 0.0:
-            shp = rotate(shp, -self.mfgrid.angrot, origin=(0.0, 0.0))
+            shp = affinity_loc.rotate(
+                shp, -self.mfgrid.angrot, origin=(0.0, 0.0)
+            )
 
         # clip line to mfgrid bbox
         lineclip = shp.intersection(pl)
@@ -843,10 +849,10 @@ def _intersect_linestring_structured(self, shp, keepzerolengths=False):
                         v_realworld.append(list(zip(rx, ry)))
                     ixs_realworld = []
                     for ix in ixs:
-                        ix_realworld = rotate(
+                        ix_realworld = affinity_loc.rotate(
                             ix, self.mfgrid.angrot, origin=(0.0, 0.0)
                         )
-                        ix_realworld = translate(
+                        ix_realworld = affinity_loc.translate(
                             ix_realworld,
                             self.mfgrid.xoffset,
                             self.mfgrid.yoffset,
@@ -887,8 +893,10 @@ def _intersect_linestring_structured(self, shp, keepzerolengths=False):
 
                 ix_shapes_realworld = []
                 for ixs in ixshapes:
-                    ixs = rotate(ixs, self.mfgrid.angrot, origin=(0.0, 0.0))
-                    ixs = translate(
+                    ixs = affinity_loc.rotate(
+                        ixs, self.mfgrid.angrot, origin=(0.0, 0.0)
+                    )
+                    ixs = affinity_loc.translate(
                         ixs, self.mfgrid.xoffset, self.mfgrid.yoffset
                     )
                     ix_shapes_realworld.append(ixs)
@@ -963,6 +971,8 @@ def _get_nodes_intersecting_linestring(self, linestring):
             lists containing node ids, lengths of intersects and the
             start and end points of the intersects
         """
+        shapely_geo = import_optional_dependency("shapely.geometry")
+
         nodelist = []
         lengths = []
         vertices = []
@@ -991,13 +1001,13 @@ def _get_nodes_intersecting_linestring(self, linestring):
             x0 = [x[0]]
             y0 = [y[0]]
 
-        (i, j) = self.intersect(Point(x0[0], y0[0])).cellids[0]
+        (i, j) = self.intersect(shapely_geo.Point(x0[0], y0[0])).cellids[0]
         Xe, Ye = self.mfgrid.xyedges
         xmin = Xe[j]
         xmax = Xe[j + 1]
         ymax = Ye[i]
         ymin = Ye[i + 1]
-        pl = box(xmin, ymin, xmax, ymax)
+        pl = shapely_geo.box(xmin, ymin, xmax, ymax)
         intersect = linestring.intersection(pl)
         # if linestring starts in cell, exits, and re-enters
         # a MultiLineString is returned.
@@ -1066,6 +1076,8 @@ def _check_adjacent_cells_intersecting_line(
             intersections with adjacent cells relative to the
             current cell (i, j)
         """
+        shapely_geo = import_optional_dependency("shapely.geometry")
+
         i, j = i_j
 
         Xe, Ye = self.mfgrid.xyedges
@@ -1084,7 +1096,7 @@ def _check_adjacent_cells_intersecting_line(
                 xmax = Xe[jj + 1]
                 ymax = Ye[ii]
                 ymin = Ye[ii + 1]
-                pl = box(xmin, ymin, xmax, ymax)
+                pl = shapely_geo.box(xmin, ymin, xmax, ymax)
                 if linestring.intersects(pl):
                     intersect = linestring.intersection(pl)
                     ixshape.append(intersect)
@@ -1111,7 +1123,7 @@ def _check_adjacent_cells_intersecting_line(
                 xmax = Xe[jj + 1]
                 ymax = Ye[ii]
                 ymin = Ye[ii + 1]
-                pl = box(xmin, ymin, xmax, ymax)
+                pl = shapely_geo.box(xmin, ymin, xmax, ymax)
                 if linestring.intersects(pl):
                     intersect = linestring.intersection(pl)
                     ixshape.append(intersect)
@@ -1138,7 +1150,7 @@ def _check_adjacent_cells_intersecting_line(
                 xmax = Xe[jj + 1]
                 ymax = Ye[ii]
                 ymin = Ye[ii + 1]
-                pl = box(xmin, ymin, xmax, ymax)
+                pl = shapely_geo.box(xmin, ymin, xmax, ymax)
                 if linestring.intersects(pl):
                     intersect = linestring.intersection(pl)
                     ixshape.append(intersect)
@@ -1165,7 +1177,7 @@ def _check_adjacent_cells_intersecting_line(
                 xmax = Xe[jj + 1]
                 ymax = Ye[ii]
                 ymin = Ye[ii + 1]
-                pl = box(xmin, ymin, xmax, ymax)
+                pl = shapely_geo.box(xmin, ymin, xmax, ymax)
                 if linestring.intersects(pl):
                     intersect = linestring.intersection(pl)
                     ixshape.append(intersect)
@@ -1205,6 +1217,8 @@ def _intersect_rectangle_structured(self, rectangle):
             the rectangle intersects
         """
 
+        shapely_geo = import_optional_dependency("shapely.geometry")
+
         nodelist = []
 
         # return if rectangle does not contain any cells
@@ -1222,9 +1236,9 @@ def _intersect_rectangle_structured(self, rectangle):
             local_extent = self.mfgrid.extent
 
         xmin, xmax, ymin, ymax = local_extent
-        bgrid = box(xmin, ymin, xmax, ymax)
+        bgrid = shapely_geo.box(xmin, ymin, xmax, ymax)
         (rxmin, rymin), (rxmax, rymax) = rectangle
-        b = box(rxmin, rymin, rxmax, rymax)
+        b = shapely_geo.box(rxmin, rymin, rxmax, rymax)
 
         if not b.intersects(bgrid):
             # return with nodelist as an empty list
@@ -1286,6 +1300,8 @@ def _intersect_polygon_structured(self, shp):
         numpy.recarray
             a record array containing information about the intersection
         """
+        shapely_geo = import_optional_dependency("shapely.geometry")
+        affinity_loc = import_optional_dependency("shapely.affinity")
 
         # initialize the result lists
         nodelist = []
@@ -1295,11 +1311,13 @@ def _intersect_polygon_structured(self, shp):
 
         # transform polygon to local grid coordinates
         if self.mfgrid.xoffset != 0.0 or self.mfgrid.yoffset != 0.0:
-            shp = translate(
+            shp = affinity_loc.translate(
                 shp, xoff=-self.mfgrid.xoffset, yoff=-self.mfgrid.yoffset
             )
         if self.mfgrid.angrot != 0.0:
-            shp = rotate(shp, -self.mfgrid.angrot, origin=(0.0, 0.0))
+            shp = affinity_loc.rotate(
+                shp, -self.mfgrid.angrot, origin=(0.0, 0.0)
+            )
 
         # use the bounds of the polygon to restrict the cell search
         minx, miny, maxx, maxy = shp.bounds
@@ -1323,7 +1341,7 @@ def _intersect_polygon_structured(self, shp):
                 ]
             else:
                 cell_coords = self.mfgrid.get_cell_vertices(i, j)
-            node_polygon = Polygon(cell_coords)
+            node_polygon = shapely_geo.Polygon(cell_coords)
             if shp.intersects(node_polygon):
                 intersect = shp.intersection(node_polygon)
                 if intersect.area > 0.0:
@@ -1351,10 +1369,10 @@ def _intersect_polygon_structured(self, shp):
                                     intersect
                                 )
                             )
-                        intersect_realworld = rotate(
+                        intersect_realworld = affinity_loc.rotate(
                             intersect, self.mfgrid.angrot, origin=(0.0, 0.0)
                         )
-                        intersect_realworld = translate(
+                        intersect_realworld = affinity_loc.translate(
                             intersect_realworld,
                             self.mfgrid.xoffset,
                             self.mfgrid.yoffset,
@@ -1565,11 +1583,13 @@ def plot_point(rec, ax=None, **kwargs):
         """
         import matplotlib.pyplot as plt
 
+        shapely_geo = import_optional_dependency("shapely.geometry")
+
         if ax is None:
             _, ax = plt.subplots()
 
         x, y = [], []
-        geo_coll = GeometryCollection(list(rec.ixshapes))
+        geo_coll = shapely_geo.GeometryCollection(list(rec.ixshapes))
         collection = parse_shapely_ix_result([], geo_coll, ["Point"])
         for c in collection:
             x.append(c.x)
diff --git a/flopy/utils/voronoi.py b/flopy/utils/voronoi.py
index 64f5512a53..45b73881b9 100644
--- a/flopy/utils/voronoi.py
+++ b/flopy/utils/voronoi.py
@@ -30,13 +30,10 @@ def get_valid_faces(vor):
 
 # todo: send this to point in polygon method defined in Rasters
 def point_in_cell(point, vertices):
-    try:
-        from shapely.geometry import Point, Polygon
-    except:
-        raise ModuleNotFoundError("shapely is not installed")
+    shapely_geo = import_optional_dependency("shapely.geometry")
 
-    p = Point(point)
-    poly = Polygon(vertices)
+    p = shapely_geo.Point(point)
+    poly = shapely_geo.Polygon(vertices)
     if p.intersects(poly):
         return True
     else:

From 72ad51de25e74203e399ef8fe2d8ce078db3ad53 Mon Sep 17 00:00:00 2001
From: Joseph D Hughes 
Date: Mon, 11 Oct 2021 17:49:39 -0500
Subject: [PATCH 10/15] refactor(imports): add function to import optional
 packages
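
The helper is vendored from pandas and wraps importlib: it returns the
imported module object or raises an ImportError with a "pip or conda"
install hint. A minimal sketch of the calling pattern the series converges
on (the shapely geometry below is only an illustration, and the absolute
import path assumes the new flopy/utils/utl_import.py location):

    from flopy.utils.utl_import import import_optional_dependency

    # returns the module object, or raises ImportError with an install
    # hint if shapely is not available
    shapely_geo = import_optional_dependency("shapely.geometry")
    poly = shapely_geo.Polygon([(0.0, 0.0), (1.0, 0.0), (1.0, 1.0)])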

---
 flopy/utils/utl_import.py | 61 ++++++++++++++++++++++++---------------
 1 file changed, 37 insertions(+), 24 deletions(-)

diff --git a/flopy/utils/utl_import.py b/flopy/utils/utl_import.py
index 5ef1e069a8..1c2abcc2fc 100644
--- a/flopy/utils/utl_import.py
+++ b/flopy/utils/utl_import.py
@@ -1,3 +1,40 @@
+# Vendored from https://github.com/pandas-dev/pandas/blob/master/pandas/compat/_optional.py
+# changeset d30aeeba0c79fb8e4b651a8f528e87c3de8cb898
+# 10/11/2021
+
+# This file is dual licensed under the terms of the BSD 3-Clause License.
+# BSD 3-Clause License
+#
+# Copyright (c) 2008-2011, AQR Capital Management, LLC, Lambda Foundry, Inc. and PyData Development Team
+# All rights reserved.
+#
+# Copyright (c) 2011-2021, Open source contributors.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+#   list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+#   this list of conditions and the following disclaimer in the documentation
+#   and/or other materials provided with the distribution.
+#
+# * Neither the name of the copyright holder nor the names of its
+#   contributors may be used to endorse or promote products derived from
+#   this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
 from __future__ import annotations
 
 import importlib
@@ -11,30 +48,6 @@
 
 VERSIONS = {
     "shapefile": "2.0.0",
-    # "bs4": "4.8.2",
-    # "bottleneck": "1.3.1",
-    # "fsspec": "0.7.4",
-    # "fastparquet": "0.4.0",
-    # "gcsfs": "0.6.0",
-    # "lxml.etree": "4.5.0",
-    # "matplotlib": "3.3.2",
-    # "numexpr": "2.7.1",
-    # "odfpy": "1.4.1",
-    # "openpyxl": "3.0.2",
-    # "pandas_gbq": "0.14.0",
-    # "pyarrow": "0.17.0",
-    # "pytest": "6.0",
-    # "pyxlsb": "1.0.6",
-    # "s3fs": "0.4.0",
-    # "scipy": "1.4.1",
-    # "sqlalchemy": "1.3.11",
-    # "tables": "3.6.1",
-    # "tabulate": "0.8.7",
-    # "xarray": "0.15.1",
-    # "xlrd": "2.0.1",
-    # "xlwt": "1.3.0",
-    # "xlsxwriter": "1.2.2",
-    # "numba": "0.50.1",
 }
 
 # A mapping from import name to package name (on PyPI) for packages where

From abe29001bb8e31daac1d66a43e9ee9c1b459ba42 Mon Sep 17 00:00:00 2001
From: Joseph D Hughes 
Date: Mon, 11 Oct 2021 17:55:54 -0500
Subject: [PATCH 11/15] refactor(imports): add function to import optional
 packages

---
 autotest/t005_test.py |  8 ++------
 autotest/t016_test.py |  4 +---
 autotest/t080_test.py | 18 ++++++++++++------
 autotest/t506_test.py |  4 +---
 4 files changed, 16 insertions(+), 18 deletions(-)

diff --git a/autotest/t005_test.py b/autotest/t005_test.py
index b8cf1878b5..df7c190bdb 100644
--- a/autotest/t005_test.py
+++ b/autotest/t005_test.py
@@ -25,9 +25,7 @@ def test_modflow_unstructured():
     lpf = flopy.mfusg.MfUsgLpf(mf)
     assert isinstance(lpf, flopy.mfusg.MfUsgLpf)
 
-    wel = flopy.mfusg.MfUsgWel(
-        mf, stress_period_data={0: [[0, -100]]}
-    )
+    wel = flopy.mfusg.MfUsgWel(mf, stress_period_data={0: [[0, -100]]})
     assert isinstance(wel, flopy.mfusg.MfUsgWel)
 
     ghb = flopy.modflow.ModflowGhb(
@@ -44,9 +42,7 @@ def test_modflow_unstructured():
     # write well file
     wel.write_file()
     assert os.path.isfile(os.path.join(cpth, f"{mf.name}.wel")) is True
-    wel2 = flopy.mfusg.MfUsgWel.load(
-        os.path.join(cpth, f"{mf.name}.wel"), mf
-    )
+    wel2 = flopy.mfusg.MfUsgWel.load(os.path.join(cpth, f"{mf.name}.wel"), mf)
     assert wel2.stress_period_data[0] == wel.stress_period_data[0]
 
     # write ghb file
diff --git a/autotest/t016_test.py b/autotest/t016_test.py
index 73e0e95bf0..ddfc9c90fe 100644
--- a/autotest/t016_test.py
+++ b/autotest/t016_test.py
@@ -106,9 +106,7 @@ def test_usg_model():
     dis = flopy.modflow.ModflowDis(mf, nlay=1, nrow=11, ncol=11)
     bas = flopy.modflow.ModflowBas(mf)
     lpf = flopy.mfusg.MfUsgLpf(mf)
-    wel = flopy.mfusg.MfUsgWel(
-        mf, stress_period_data={0: [[0, 5, 5, -1.0]]}
-    )
+    wel = flopy.mfusg.MfUsgWel(mf, stress_period_data={0: [[0, 5, 5, -1.0]]})
     ghb = flopy.modflow.ModflowGhb(
         mf,
         stress_period_data={
diff --git a/autotest/t080_test.py b/autotest/t080_test.py
index c58f13fd5c..b2f2d47bb7 100644
--- a/autotest/t080_test.py
+++ b/autotest/t080_test.py
@@ -40,6 +40,7 @@
 if not os.path.exists(gridgen_ws):
     os.makedirs(gridgen_ws)
 
+
 def test_mfusg():
 
     name = "dummy"
@@ -113,18 +114,23 @@ def test_mfusg():
 
         # test if single node idx works
         one_hds = flopy.utils.HeadUFile(head_file).get_ts(idx=300)
-        if one_hds[0,1] != head[0][300]:
-            raise AssertionError("Error head from 'get_ts' != head from 'get_data'")
+        if one_hds[0, 1] != head[0][300]:
+            raise AssertionError(
+                "Error head from 'get_ts' != head from 'get_data'"
+            )
 
         # test if list of nodes for idx works
-        nodes = [300,182,65]
+        nodes = [300, 182, 65]
 
         multi_hds = flopy.utils.HeadUFile(head_file).get_ts(idx=nodes)
         for i, node in enumerate(nodes):
-            if multi_hds[0, i+1] != head[0][node]:
-                raise AssertionError("Error head from 'get_ts' != head from 'get_data'")
+            if multi_hds[0, i + 1] != head[0][node]:
+                raise AssertionError(
+                    "Error head from 'get_ts' != head from 'get_data'"
+                )
 
     return
 
-if __name__ == '__main__':
+
+if __name__ == "__main__":
     test_mfusg()
diff --git a/autotest/t506_test.py b/autotest/t506_test.py
index 116f7d95a2..5b8177394b 100644
--- a/autotest/t506_test.py
+++ b/autotest/t506_test.py
@@ -456,9 +456,7 @@ def test_mfusg():
 
         # re-run with an LPF keyword specified. This would have thrown an error
         # before the addition of ikcflag to mflpf.py (flopy 3.3.3 and earlier).
-        lpf = flopy.mfusg.MfUsgLpf(
-            m, novfc=True, nocvcorrection=True
-        )
+        lpf = flopy.mfusg.MfUsgLpf(m, novfc=True, nocvcorrection=True)
         m.write_input()
         m.run_model()
 

From 76927da15170a22fc1ea3cf4d62ab39f269c463c Mon Sep 17 00:00:00 2001
From: Joseph D Hughes 
Date: Mon, 11 Oct 2021 20:03:07 -0500
Subject: [PATCH 12/15] refactor(imports): add function to import optional
 packages

---
 flopy/export/utils.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/flopy/export/utils.py b/flopy/export/utils.py
index 384f96ff87..e789c1b7c1 100644
--- a/flopy/export/utils.py
+++ b/flopy/export/utils.py
@@ -1801,7 +1801,6 @@ def export_array_contours(
 
     """
     import matplotlib.pyplot as plt
-    from ..utils import import_optional_dependency
 
     if epsg is None:
         epsg = modelgrid.epsg

From 99e6daca97bf6693de6e284e26ec28471a6b8c41 Mon Sep 17 00:00:00 2001
From: Joseph D Hughes 
Date: Tue, 12 Oct 2021 08:46:19 -0500
Subject: [PATCH 13/15] refactor(imports): add function to import optional
 packages

matplotlib is now a FloPy requirement.
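
dateutil and pandas are also added to the VERSIONS table in utl_import.py,
so the helper can enforce a version floor as well as presence. A hedged
sketch of the min_version keyword (the exact error text is illustrative):

    from flopy.utils.utl_import import import_optional_dependency

    # raises by default if pandas is missing or older than 0.15.0
    pd = import_optional_dependency("pandas", min_version="0.15.0")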
---
 README.md                         |  2 +-
 docs/flopy_method_dependencies.md | 34 ++++++++++++++-----------------
 etc/environment.yml               | 22 ++++++++++++++------
 flopy/utils/utl_import.py         |  2 ++
 setup.py                          |  4 ++--
 5 files changed, 36 insertions(+), 28 deletions(-)

diff --git a/README.md b/README.md
index 54dfcbf715..2b3d77b2c4 100644
--- a/README.md
+++ b/README.md
@@ -26,7 +26,7 @@ For general modeling issues, please consult a modeling forum, such as the [MODFL
 Installation
 -----------------------------------------------
 
-FloPy requires **Python** 3.7 (or higher) and **NumPy** 1.15 (or higher).  Dependencies for optional FloPy methods are summarized [here](docs/flopy_method_dependencies.md).
+FloPy requires **Python** 3.7 (or higher), **NumPy** 1.15.0 (or higher), and **matplotlib** 1.4.0 (or higher).  Dependencies for optional FloPy methods are summarized [here](docs/flopy_method_dependencies.md).
 
 To install FloPy type:
 
diff --git a/docs/flopy_method_dependencies.md b/docs/flopy_method_dependencies.md
index 9e08aa2cb7..94ff0351a1 100644
--- a/docs/flopy_method_dependencies.md
+++ b/docs/flopy_method_dependencies.md
@@ -2,31 +2,27 @@ Additional dependencies to use optional FloPy helper methods are listed below.
 
 | Method                                                                               | Python Package                                                     |
 | ------------------------------------------------------------------------------------ | ------------------------------------------------------------------ |
-| `.PlotMapView()` in `flopy.plot`                                                     | **matplotlib** >= 1.4                                              |
-| `.PlotCrossSection()` in `flopy.plot`                                                | **matplotlib** >= 1.4                                              |
-| `.plot()`                                                                            | **matplotlib** >= 1.4                                              |
-| `.plot_shapefile()`                                                                  | **matplotlib** >= 1.4 and **Pyshp** >= 1.2                         |
-| `.to_shapefile()`                                                                    | **Pyshp** >= 1.2                                                   |
-| `.export(*.shp)`                                                                     | **Pyshp** >= 1.2                                                   |
-| `.export(*.nc)`                                                                      | **netcdf4** >= 1.1, and **python-dateutil** >= 2.4                 |
+| `.plot_shapefile()`                                                                  | **Pyshp** >= 2.0.0                                                 |
+| `.to_shapefile()`                                                                    | **Pyshp** >= 2.0.0                                                 |
+| `.export(*.shp)`                                                                     | **Pyshp** >= 2.0.0                                                 |
+| `.export(*.nc)`                                                                      | **netcdf4** >= 1.1, and **python-dateutil** >= 2.4.0               |
 | `.export(*.tif)`                                                                     | **rasterio**                                                       |
 | `.export(*.asc)` in `flopy.utils.reference` `SpatialReference` class                 | **scipy.ndimage**                                                  |
 | `.interpolate()` in `flopy.utils.reference` `SpatialReference` class                 | **scipy.interpolate**                                              |
 | `.interpolate()` in `flopy.mf6.utils.reference` `StructuredSpatialReference` class   | **scipy.interpolate**                                              |
 | `._parse_units_from_proj4()` in `flopy.utils.reference` `SpatialReference` class     | **pyproj**                                                         |
-| `.get_dataframes()` in `flopy.utils.mflistfile` `ListBudget` class                   | **pandas** >= 0.15                                                 |
-| `.get_dataframes()` in `flopy.utils.observationfile` `ObsFiles` class                | **pandas** >= 0.15                                                 |
-| `.get_dataframes()` in `flopy.utils.sfroutputfile` `ModflowSfr2` class               | **pandas** >= 0.15                                                 |
-| `.get_dataframes()` in `flopy.utils.util_list` `MfList` class                        | **pandas** >= 0.15                                                 |
-| `.get_dataframes()` in `flopy.utils.zonebud` `ZoneBudget` class                      | **pandas** >= 0.15                                                 |
-| `.pivot_keyarray()` in `flopy.mf6.utils.arrayutils` `AdvancedPackageUtil` class      | **pandas** >= 0.15                                                 |
-| `._get_vertices()` in `flopy.mf6.utils.binaryfile_utils` `MFOutputRequester` class   | **pandas** >= 0.15                                                 |
-| `.get_dataframe()` in `flopy.mf6.utils.mfobservation` `Observations` class           | **pandas** >= 0.15                                                 |
-| `.df()` in `flopy.modflow.mfsfr2` `SfrFile` class                                    | **pandas** >= 0.15                                                 |
-| `.time_coverage()` in `flopy.export.metadata` `acc` class - ***used if available***  | **pandas** >= 0.15                                                 |
-| `.loadtxt()` in `flopy.utils.flopyio` - ***used if available***                      | **pandas** >= 0.15                                                 |
+| `.get_dataframes()` in `flopy.utils.mflistfile` `ListBudget` class                   | **pandas** >= 0.15.0                                               |
+| `.get_dataframes()` in `flopy.utils.observationfile` `ObsFiles` class                | **pandas** >= 0.15.0                                               |
+| `.get_dataframes()` in `flopy.utils.sfroutputfile` `ModflowSfr2` class               | **pandas** >= 0.15.0                                               |
+| `.get_dataframes()` in `flopy.utils.util_list` `MfList` class                        | **pandas** >= 0.15.0                                               |
+| `.get_dataframes()` in `flopy.utils.zonebud` `ZoneBudget` class                      | **pandas** >= 0.15.0                                               |
+| `.pivot_keyarray()` in `flopy.mf6.utils.arrayutils` `AdvancedPackageUtil` class      | **pandas** >= 0.15.0                                               |
+| `._get_vertices()` in `flopy.mf6.utils.binaryfile_utils` `MFOutputRequester` class   | **pandas** >= 0.15.0                                               |
+| `.get_dataframe()` in `flopy.mf6.utils.mfobservation` `Observations` class           | **pandas** >= 0.15.0                                               |
+| `.df()` in `flopy.modflow.mfsfr2` `SfrFile` class                                    | **pandas** >= 0.15.0                                               |
+| `.time_coverage()` in `flopy.export.metadata` `acc` class - ***used if available***  | **pandas** >= 0.15.0                                               |
+| `.loadtxt()` in `flopy.utils.flopyio` - ***used if available***                      | **pandas** >= 0.15.0                                               |
 | `.generate_classes()` in `flopy.mf6.utils`                                           | [**pymake**](https://github.com/modflowpy/pymake)                  |
-| `.intersect()` in `flopy.discretization.VertexGrid`                                  | **matplotlib** >= 1.4                                              |
 | `GridIntersect()` in `flopy.utils.gridintersect`                                     | **shapely**                                                        |
 | `GridIntersect().plot_polygon()` in `flopy.utils.gridintersect`                      | **shapely** and **descartes**                                      |
 | `Raster()` in `flopy.utils.Raster`                                                   | **rasterio**, **affine**, and **scipy**                            |
diff --git a/etc/environment.yml b/etc/environment.yml
index e78f3169b3..83defab1cd 100644
--- a/etc/environment.yml
+++ b/etc/environment.yml
@@ -2,23 +2,27 @@ name: flopy
 channels:
   - conda-forge
 dependencies:
+  # required
   - python>=3.7
+  - numpy>=1.15.0
+  - matplotlib>=1.4.0
+
+  # testing
   - pylint
   - flake8
   - black
   - nose
   - nose-timer
   - coverage
+
+  # optional
   - appdirs
-  - requests
-  - numpy>=1.15
-  - matplotlib
-  - bmipy
+  - python-dateutil>=2.4.0
   - affine
   - scipy
   - pandas
   - netcdf4
-  - pyshp
+  - pyshp>=2.0.0
   - rasterio
   - fiona
   - descartes
@@ -26,4 +30,10 @@ dependencies:
   - shapely
   - geos
   - geojson
-  - vtk
\ No newline at end of file
+  - vtk
+
+  # external dependencies
+  - requests
+
+  # MODFLOW API dependencies
+  - bmipy
diff --git a/flopy/utils/utl_import.py b/flopy/utils/utl_import.py
index 1c2abcc2fc..7efe1e8f21 100644
--- a/flopy/utils/utl_import.py
+++ b/flopy/utils/utl_import.py
@@ -48,6 +48,8 @@
 
 VERSIONS = {
     "shapefile": "2.0.0",
+    "dateutil": "2.4.0",
+    "pandas": "0.15.0",
 }
 
 # A mapping from import name to package name (on PyPI) for packages where
diff --git a/setup.py b/setup.py
index 5950da4206..7580eec8e5 100644
--- a/setup.py
+++ b/setup.py
@@ -29,8 +29,8 @@
     license="CC0",
     platforms="Windows, Mac OS-X, Linux",
     install_requires=[
-        "numpy >=1.15",
-        "matplotlib",
+        "numpy>=1.15.0",
+        "matplotlib>=1.4.0",
     ],
     packages=[
         "flopy",

From 5113cfd95b9ba4734fc20cff6156a349fe2fdaf6 Mon Sep 17 00:00:00 2001
From: Joseph D Hughes 
Date: Tue, 12 Oct 2021 11:04:11 -0500
Subject: [PATCH 14/15] refactor(imports): add function to import optional
 packages

matplotlib is now a FloPy requirement.
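
The keyword argument passed to import_optional_dependency is renamed from
extra to error_message at every call site, which makes the purpose of the
extra text explicit. The calling convention after this patch (message text
taken from the ListBudget call site):

    pd = import_optional_dependency(
        "pandas",
        error_message="ListBudget.get_dataframes() requires pandas.",
    )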

Closes #1257
---
 flopy/export/utils.py               | 17 +++++++++++------
 flopy/mf6/utils/binaryfile_utils.py |  6 ++++--
 flopy/mf6/utils/mfobservation.py    |  6 ++++--
 flopy/utils/mflistfile.py           |  6 ++++--
 flopy/utils/mtlistfile.py           | 12 ++++++++----
 flopy/utils/observationfile.py      |  6 ++++--
 flopy/utils/util_list.py            |  6 ++++--
 flopy/utils/utl_import.py           |  6 +++---
 flopy/utils/voronoi.py              |  6 ++++--
 flopy/utils/zonbud.py               | 12 ++++++++----
 10 files changed, 54 insertions(+), 29 deletions(-)

diff --git a/flopy/export/utils.py b/flopy/export/utils.py
index e789c1b7c1..9761de2775 100644
--- a/flopy/export/utils.py
+++ b/flopy/export/utils.py
@@ -1511,8 +1511,10 @@ def export_array(
         a = a.copy()
         a[np.isnan(a)] = nodata
         if modelgrid.angrot != 0:
-            extra = "exporting rotated grids requires SciPy."
-            ndimage = import_optional_dependency("scipy.ndimage", extra=extra)
+            ndimage = import_optional_dependency(
+                "scipy.ndimage",
+                error_message="exporting rotated grids requires SciPy.",
+            )
 
             a = ndimage.rotate(a, modelgrid.angrot, cval=nodata)
             height_rot, width_rot = a.shape
@@ -1548,8 +1550,10 @@ def export_array(
             or modelgrid.delr[0] != modelgrid.delc[0]
         ):
             raise ValueError("GeoTIFF export require a uniform grid.")
-        extra = "GeoTIFF export requires the rasterio."
-        rasterio = import_optional_dependency("rasterio", extra=extra)
+        rasterio = import_optional_dependency(
+            "rasterio",
+            error_message="GeoTIFF export requires the rasterio.",
+        )
         dxdy = modelgrid.delc[0]
         # because this is only implemented for a structured grid,
         # we can get the xul and yul coordinate from modelgrid.xvertices(0, 0)
@@ -1709,8 +1713,9 @@ def export_contourf(
 
     """
 
-    extra = "export_contourf requires shapely."
-    shapely = import_optional_dependency("shapely", extra=extra)
+    shapely = import_optional_dependency(
+        "shapely", error_message="export_contourf requires shapely."
+    )
     from shapely import geometry
 
     from ..utils.geometry import Polygon
diff --git a/flopy/mf6/utils/binaryfile_utils.py b/flopy/mf6/utils/binaryfile_utils.py
index b97967a193..2b222259c9 100644
--- a/flopy/mf6/utils/binaryfile_utils.py
+++ b/flopy/mf6/utils/binaryfile_utils.py
@@ -229,8 +229,10 @@ def _get_vertices(mfdict, key):
         elevations corresponding to a row column location
         """
 
-        extra = "MFOutputRequester._get_vertices() requires pandas."
-        pd = import_optional_dependency("pandas", extra=extra)
+        pd = import_optional_dependency(
+            "pandas",
+            error_message="MFOutputRequester._get_vertices() requires pandas.",
+        )
 
         mname = key[0]
         cellid = mfdict[(mname, "DISV8", "CELL2D", "cell2d_num")]
diff --git a/flopy/mf6/utils/mfobservation.py b/flopy/mf6/utils/mfobservation.py
index a3869022c7..0c19054141 100644
--- a/flopy/mf6/utils/mfobservation.py
+++ b/flopy/mf6/utils/mfobservation.py
@@ -209,8 +209,10 @@ def get_dataframe(
         pd.DataFrame
 
         """
-        extra = "get_dataframe() requires pandas."
-        pd = import_optional_dependency("pandas", extra=extra)
+        pd = import_optional_dependency(
+            "pandas",
+            error_message="get_dataframe() requires pandas.",
+        )
 
         data_str = self._reader(self.Obsname)
         data = self._array_to_dict(data_str)
diff --git a/flopy/utils/mflistfile.py b/flopy/utils/mflistfile.py
index e982c68eb4..08ae2f1de8 100644
--- a/flopy/utils/mflistfile.py
+++ b/flopy/utils/mflistfile.py
@@ -475,8 +475,10 @@ def get_dataframes(self, start_datetime="1-1-1970", diff=False):
 
         """
 
-        extra = "ListBudget.get_dataframes() requires pandas."
-        pd = import_optional_dependency("pandas", extra=extra)
+        pd = import_optional_dependency(
+            "pandas",
+            error_message="ListBudget.get_dataframes() requires pandas.",
+        )
 
         if not self._isvalid:
             return None
diff --git a/flopy/utils/mtlistfile.py b/flopy/utils/mtlistfile.py
index a0d67e31aa..75e777484a 100644
--- a/flopy/utils/mtlistfile.py
+++ b/flopy/utils/mtlistfile.py
@@ -75,8 +75,10 @@ def parse(
             (optionally) surface-water mass budget.
             If the SFT process is not used, df_sw is None.
         """
-        extra = "MtListBudget.parse() requires pandas."
-        pd = import_optional_dependency("pandas", extra=extra)
+        pd = import_optional_dependency(
+            "pandas",
+            error_message="MtListBudget.parse() requires pandas.",
+        )
 
         self.gw_data = {}
         self.sw_data = {}
@@ -183,8 +185,10 @@ def parse(
         return df_gw, df_sw
 
     def _diff(self, df):
-        extra = "MtListBudget._diff() requires pandas."
-        pd = import_optional_dependency("pandas", extra=extra)
+        pd = import_optional_dependency(
+            "pandas",
+            error_message="MtListBudget._diff() requires pandas.",
+        )
 
         out_cols = [
             c for c in df.columns if "_out" in c and not c.startswith("net_")
diff --git a/flopy/utils/observationfile.py b/flopy/utils/observationfile.py
index 73c4cdf5df..3ca3506029 100644
--- a/flopy/utils/observationfile.py
+++ b/flopy/utils/observationfile.py
@@ -177,8 +177,10 @@ def get_dataframe(
 
         from ..utils.utils_def import totim_to_datetime
 
-        extra = "ObsFiles.get_dataframe() requires pandas."
-        pd = import_optional_dependency("pandas", extra=extra)
+        pd = import_optional_dependency(
+            "pandas",
+            error_message="ObsFiles.get_dataframe() requires pandas.",
+        )
 
         i0 = 0
         i1 = self.data.shape[0]
diff --git a/flopy/utils/util_list.py b/flopy/utils/util_list.py
index 8e07823817..5b9d781711 100644
--- a/flopy/utils/util_list.py
+++ b/flopy/utils/util_list.py
@@ -442,8 +442,10 @@ def get_dataframe(self, squeeze=False):
         Requires pandas.
 
         """
-        extra = "MfList.get_dataframe() requires pandas."
-        pd = import_optional_dependency("pandas", extra=extra)
+        pd = import_optional_dependency(
+            "pandas",
+            error_message="MfList.get_dataframe() requires pandas.",
+        )
 
         # make a dataframe of all data for all stress periods
         names = ["per", "k", "i", "j"]
diff --git a/flopy/utils/utl_import.py b/flopy/utils/utl_import.py
index 7efe1e8f21..caa2208b71 100644
--- a/flopy/utils/utl_import.py
+++ b/flopy/utils/utl_import.py
@@ -74,7 +74,7 @@ def get_version(module: types.ModuleType) -> str:
 
 def import_optional_dependency(
     name: str,
-    extra: str = "",
+    error_message: str = "",
     errors: str = "raise",
     min_version: str | None = None,
 ):
@@ -89,7 +89,7 @@ def import_optional_dependency(
     ----------
     name : str
         The module name.
-    extra : str
+    error_message : str
         Additional text to include in the ImportError message.
     errors : str {'raise', 'warn', 'ignore'}
         What to do when a dependency is not found or its version is too old.
@@ -121,7 +121,7 @@ def import_optional_dependency(
     install_name = package_name if package_name is not None else name
 
     msg = (
-        f"Missing optional dependency '{install_name}'. {extra} "
+        f"Missing optional dependency '{install_name}'. {error_message} "
         f"Use pip or conda to install {install_name}."
     )
     try:
diff --git a/flopy/utils/voronoi.py b/flopy/utils/voronoi.py
index 45b73881b9..e8785750fe 100644
--- a/flopy/utils/voronoi.py
+++ b/flopy/utils/voronoi.py
@@ -72,8 +72,10 @@ def tri2vor(tri, **kwargs):
     verts, iverts : ndarray, list of lists
 
     """
-    extra = "Voronoi requires SciPy."
-    import_optional_dependency("scipy.spatial", extra=extra)
+    import_optional_dependency(
+        "scipy.spatial",
+        error_message="Voronoi requires SciPy.",
+    )
     from scipy.spatial import Voronoi
 
     # assign local variables
diff --git a/flopy/utils/zonbud.py b/flopy/utils/zonbud.py
index 336ac873cf..ed7bcde029 100644
--- a/flopy/utils/zonbud.py
+++ b/flopy/utils/zonbud.py
@@ -2369,8 +2369,10 @@ def _recarray_to_dataframe(
 
     pd.DataFrame
     """
-    extra = "ZoneBudget.get_dataframes() requires pandas."
-    pd = import_optional_dependency("pandas", extra=extra)
+    pd = import_optional_dependency(
+        "pandas",
+        error_message="ZoneBudget.get_dataframes() requires pandas.",
+    )
 
     valid_index_keys = ["totim", "kstpkper"]
     s = f'index_key "{index_key}" is not valid.'
@@ -2991,8 +2993,10 @@ def _volumetric_flux(recarray, modeltime, extrapolate_kper=False):
         pd.DataFrame
 
     """
-    extra = "ZoneBudget._volumetric_flux() requires pandas."
-    pd = import_optional_dependency("pandas", extra=extra)
+    pd = import_optional_dependency(
+        "pandas",
+        error_message="ZoneBudget._volumetric_flux() requires pandas.",
+    )
 
     nper = len(modeltime.nstp)
     volumetric_data = {}

From c6723f455d5ee42bbfaa390ab5165f0e23adecd0 Mon Sep 17 00:00:00 2001
From: Joseph D Hughes 
Date: Wed, 13 Oct 2021 11:36:37 -0500
Subject: [PATCH 15/15] refactor(imports): add function to import optional
 packages

matplotlib is now a FloPy requirement.

Closes #1257
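
For call sites that can degrade gracefully, the errors keyword accepts
"raise", "warn", or "ignore" ("silent" behaves like "ignore" but without
writing the warning to the screen). A sketch of the soft-import pattern,
assuming the vendored pandas behavior that a missing package yields None
when errors is not "raise":

    # soft import: returns None instead of raising if geojson is absent
    geojson = import_optional_dependency("geojson", errors="ignore")
    if geojson is None:
        print("geojson not installed; skipping GeoJSON output")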
---
 docs/flopy_method_dependencies.md | 4 ++++
 flopy/utils/gridintersect.py      | 2 +-
 flopy/utils/utl_import.py         | 4 ++--
 3 files changed, 7 insertions(+), 3 deletions(-)

diff --git a/docs/flopy_method_dependencies.md b/docs/flopy_method_dependencies.md
index 94ff0351a1..fca7dd721c 100644
--- a/docs/flopy_method_dependencies.md
+++ b/docs/flopy_method_dependencies.md
@@ -29,3 +29,7 @@ Additional dependencies to use optional FloPy helper methods are listed below.
 | `Raster().sample_polygon()` in `flopy.utils.Raster`                                  | **shapely**                                                        |
 | `Raster().crop()` in `flopy.utils.Raster`                                            | **shapely**                                                        |
 | `.array_at_verts()` in `flopy.discretization.structuredgrid` `StructuredGrid` class  | **scipy.interpolate**                                              |
+| `get_sciencebase_xml_metadata()` in `flopy.export.metadata` `acdd` class             | **defusedxml**                                                     |
+| `flopy.utils.geospatial_utils` `GeoSpatialUtil` class                                | **geojson**                                                        |
+| `flopy.utils.geospatial_utils` `GeoSpatialCollection` class                          | **geojson**                                                        |
+| `flopy.export.vtk` `Vtk` class                                                       | **vtk**                                                            |
diff --git a/flopy/utils/gridintersect.py b/flopy/utils/gridintersect.py
index 413cab42a2..6e3d9141ab 100644
--- a/flopy/utils/gridintersect.py
+++ b/flopy/utils/gridintersect.py
@@ -170,7 +170,7 @@ def __init__(self, mfgrid, method=None, rtree=True):
             if self.rtree:
                 strtree = import_optional_dependency(
                     "shapely.strtree",
-                    extra="STRTree requires shapely",
+                    error_message="STRTree requires shapely",
                 )
                 self.strtree = strtree.STRtree(self._get_gridshapes())
 
diff --git a/flopy/utils/utl_import.py b/flopy/utils/utl_import.py
index caa2208b71..321215c7a5 100644
--- a/flopy/utils/utl_import.py
+++ b/flopy/utils/utl_import.py
@@ -44,7 +44,7 @@
 
 from .parse_version import Version
 
-# Update install.rst when updating versions!
+# Update docs/flopy_method_dependencies.md when updating versions!
 
 VERSIONS = {
     "shapefile": "2.0.0",
@@ -104,7 +104,7 @@ def import_optional_dependency(
         * silent: Same as "ignore" except warning message is not written to
           the screen.
     min_version : str, default None
-        Specify a minimum version that is different from the global pandas
+        Specify a minimum version that is different from the global FloPy
         minimum version required.
     Returns
     -------