From 82ccc7576bac44a1377c0e8a28d979017c4d7cc3 Mon Sep 17 00:00:00 2001
From: David Bold
Date: Mon, 19 Sep 2022 15:10:07 +0200
Subject: [PATCH 01/42] Try to give better error

---
 xbout/load.py | 13 +++++++++++++
 1 file changed, 13 insertions(+)

diff --git a/xbout/load.py b/xbout/load.py
index 39c77d6f..35a6ef56 100644
--- a/xbout/load.py
+++ b/xbout/load.py
@@ -776,6 +776,19 @@ def _arrange_for_concatenation(filepaths, nxpe=1, nype=1):
     nprocs = nxpe * nype
     n_runs = int(len(filepaths) / nprocs)
 
+    if len(filepaths) < nprocs:
+        if len(filepaths) == 1:
+            raise ValueError(
+                "A parallel simulation was loaded, but only a single "
+                "file was loaded. Please ensure to pass in all files "
+                "by specifying e.g. `BOUT.dmp.*.nc` rather than "
+                "`BOUT.dmp.0.nc`."
+            )
+        raise ValueError(
+            f"A parallel simulation was loaded, but only a {len(filepaths)} "
+            "files were loaded. Please ensure to pass in all files "
+            "by specifying e.g. `BOUT.dmp.*.nc`"
+        )
     if len(filepaths) % nprocs != 0:
         raise ValueError(
             "Each run directory does not contain an equal number "

From 5b5e42934ff24d9f3121cc61ecb7f97bdd5baaeb Mon Sep 17 00:00:00 2001
From: David Bold
Date: Mon, 19 Sep 2022 15:10:36 +0200
Subject: [PATCH 02/42] Use run_id to sort files

---
 xbout/load.py | 109 +++++++++++++++++++++++++++++++++-----------------
 1 file changed, 72 insertions(+), 37 deletions(-)

diff --git a/xbout/load.py b/xbout/load.py
index 35a6ef56..69534b15 100644
--- a/xbout/load.py
+++ b/xbout/load.py
@@ -776,46 +776,81 @@ def _arrange_for_concatenation(filepaths, nxpe=1, nype=1):
     nprocs = nxpe * nype
     n_runs = int(len(filepaths) / nprocs)
 
-    if len(filepaths) < nprocs:
-        if len(filepaths) == 1:
+    runids = []
+    for fp in filepaths:
+        with xr.open_dataset(fp) as tmp:
+            if "run_id" not in tmp:
+                runids = None
+                break
+            runids.append(tmp["run_id"])
+    if not runids:
+        if len(filepaths) < nprocs:
+            if len(filepaths) == 1:
+                raise ValueError(
+                    "A parallel simulation was loaded, but only a single "
+                    "file was loaded. Please ensure to pass in all files "
+                    "by specifying e.g. `BOUT.dmp.*.nc` rather than "
+                    "`BOUT.dmp.0.nc`."
+                )
             raise ValueError(
-                "A parallel simulation was loaded, but only a single "
-                "file was loaded. Please ensure to pass in all files "
-                "by specifying e.g. `BOUT.dmp.*.nc` rather than "
-                "`BOUT.dmp.0.nc`."
+                f"A parallel simulation was loaded, but only a {len(filepaths)} "
+                "files were loaded. Please ensure to pass in all files "
+                "by specifying e.g. `BOUT.dmp.*.nc`"
             )
-        raise ValueError(
-            f"A parallel simulation was loaded, but only a {len(filepaths)} "
-            "files were loaded. Please ensure to pass in all files "
-            "by specifying e.g. `BOUT.dmp.*.nc`"
-        )
-    if len(filepaths) % nprocs != 0:
-        raise ValueError(
-            "Each run directory does not contain an equal number "
-            "of output files. If the parallelization scheme of "
-            "your simulation changed partway-through, then please "
-            "load each directory separately and concatenate them "
-            "along the time dimension with xarray.concat()."
-        )
+        if len(filepaths) % nprocs != 0:
+            raise ValueError(
+                "Each run directory does not contain an equal number "
+                "of output files. If the parallelization scheme of "
+                "your simulation changed partway-through, then please "
+                "load each directory separately and concatenate them "
+                "along the time dimension with xarray.concat()."
+            )
+        # Create list of lists of filepaths, so that xarray knows how they should
+        # be concatenated by xarray.open_mfdataset()
+        paths = iter(filepaths)
+        paths_grid = [
+            [[next(paths) for x in range(nxpe)] for y in range(nype)]
+            for t in range(n_runs)
+        ]
+
+    else:
+        paths_sorted = []
+        lastid = None
+        for path, gid in zip(filepaths, runids):
+            if lastid != gid:
+                lastid = gid
+                paths_sorted.append([])
+            paths_sorted[-1].append(path)
+        paths_grid = []
+        for paths in paths_sorted:
+            if len(paths) != nprocs:
+                with xr.open_dataset(paths[0]) as tmp:
+                    if tmp["PE_XIND"] != 0 or tmp["PE_YIND"] != 0:
+                        # The first file is missing.
+                        warn(
+                            f"Ignoring {len(paths)} files as the first seems to be missing: {paths}"
+                        )
+                        continue
+                    assert tmp["NXPE"] == nxpe
+                    assert tmp["NYPE"] == nype
+                    raise ValueError(
+                        f"Something is wrong. We expected {nprocs} files but found {len(paths)} files."
+                    )
+            paths = iter(paths)
+
+            paths_grid.append([[next(paths) for x in range(nxpe)] for y in range(nype)])
+
+        # Dimensions along which no concatenation is needed are still present as
+        # single-element lists, so need to concatenation along dim=None for those
+        concat_dims = [None, None, None]
+        if len(filepaths) > nprocs:
+            concat_dims[0] = "t"
+        if nype > 1:
+            concat_dims[1] = "y"
+        if nxpe > 1:
+            concat_dims[2] = "x"
 
-    # Create list of lists of filepaths, so that xarray knows how they should
-    # be concatenated by xarray.open_mfdataset()
-    paths = iter(filepaths)
-    paths_grid = [
-        [[next(paths) for x in range(nxpe)] for y in range(nype)] for t in range(n_runs)
-    ]
-
-    # Dimensions along which no concatenation is needed are still present as
-    # single-element lists, so need to concatenation along dim=None for those
-    concat_dims = [None, None, None]
-    if len(filepaths) > nprocs:
-        concat_dims[0] = "t"
-    if nype > 1:
-        concat_dims[1] = "y"
-    if nxpe > 1:
-        concat_dims[2] = "x"
-
-    return paths_grid, concat_dims
+        return paths_grid, concat_dims
 
 
 def _trim(ds, *, guards, keep_boundaries, nxpe, nype, is_restart):

From 1833b78f0607cca3e7283e578fcf1a474d2d0ce3 Mon Sep 17 00:00:00 2001
From: David Bold
Date: Fri, 30 Sep 2022 11:56:43 +0200
Subject: [PATCH 03/42] fix typo

Co-authored-by: John Omotani
---
 xbout/load.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/xbout/load.py b/xbout/load.py
index 69534b15..0cd7f98e 100644
--- a/xbout/load.py
+++ b/xbout/load.py
@@ -793,7 +793,7 @@ def _arrange_for_concatenation(filepaths, nxpe=1, nype=1):
                     "`BOUT.dmp.0.nc`."
                 )
             raise ValueError(
-                f"A parallel simulation was loaded, but only a {len(filepaths)} "
+                f"A parallel simulation was loaded, but only {len(filepaths)} "
                 "files were loaded. Please ensure to pass in all files "
                 "by specifying e.g. `BOUT.dmp.*.nc`"
             )

From 0d83068f26383ee11ed31e9ae096a26dab7cbf21 Mon Sep 17 00:00:00 2001
From: David Bold
Date: Fri, 30 Sep 2022 11:58:40 +0200
Subject: [PATCH 04/42] Fix indentation

xarray does not like merging if the data is not square.
So unfortunately we cannot easily support the case where some MPI
options changed during the simulations.

Co-authored-by: John Omotani
---
 xbout/load.py | 22 +++++++++++-----------
 1 file changed, 11 insertions(+), 11 deletions(-)

diff --git a/xbout/load.py b/xbout/load.py
index 0cd7f98e..52c8c6b0 100644
--- a/xbout/load.py
+++ b/xbout/load.py
@@ -840,17 +840,17 @@ def _arrange_for_concatenation(filepaths, nxpe=1, nype=1):
 
             paths_grid.append([[next(paths) for x in range(nxpe)] for y in range(nype)])
 
-        # Dimensions along which no concatenation is needed are still present as
-        # single-element lists, so need to concatenation along dim=None for those
-        concat_dims = [None, None, None]
-        if len(filepaths) > nprocs:
-            concat_dims[0] = "t"
-        if nype > 1:
-            concat_dims[1] = "y"
-        if nxpe > 1:
-            concat_dims[2] = "x"
-
-        return paths_grid, concat_dims
+    # Dimensions along which no concatenation is needed are still present as
+    # single-element lists, so need to concatenation along dim=None for those
+    concat_dims = [None, None, None]
+    if len(filepaths) > nprocs:
+        concat_dims[0] = "t"
+    if nype > 1:
+        concat_dims[1] = "y"
+    if nxpe > 1:
+        concat_dims[2] = "x"
+
+    return paths_grid, concat_dims
 
 
 def _trim(ds, *, guards, keep_boundaries, nxpe, nype, is_restart):

From ff65dfa087515b5fb0bce7abd838028e009a7b17 Mon Sep 17 00:00:00 2001
From: David Bold
Date: Fri, 30 Sep 2022 12:45:19 +0200
Subject: [PATCH 05/42] generalise getting run_id

* The filepaths may actually be datasets
* The files may not exist, as they may be for testing only
---
 xbout/load.py | 20 +++++++++++++-----
 1 file changed, 15 insertions(+), 5 deletions(-)

diff --git a/xbout/load.py b/xbout/load.py
index 52c8c6b0..1331b5e2 100644
--- a/xbout/load.py
+++ b/xbout/load.py
@@ -777,12 +777,22 @@ def _arrange_for_concatenation(filepaths, nxpe=1, nype=1):
     nprocs = nxpe * nype
     n_runs = int(len(filepaths) / nprocs)
     runids = []
+
+    def getrunid(fp):
+        if _is_path(fp):
+            try:
+                with xr.open_dataset(fp) as tmp:
+                    return tmp.get("run_id", None)
+            except FileNotFoundError:
+                return None
+        return fp.get("run_id", None)
+
     for fp in filepaths:
-        with xr.open_dataset(fp) as tmp:
-            if "run_id" not in tmp:
-                runids = None
-                break
-            runids.append(tmp["run_id"])
+        thisrunid = getrunid(fp)
+        if thisrunid is None:
+            runids = None
+            break
+        runids.append(thisrunid)
     if not runids:
         if len(filepaths) < nprocs:
             if len(filepaths) == 1:

From fabee143ea18a0656aa3ee46e527332265d17ecd Mon Sep 17 00:00:00 2001
From: David Bold
Date: Mon, 24 Jul 2023 14:52:06 +0200
Subject: [PATCH 06/42] Switch to animatplot-ng

---
 setup.cfg | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/setup.cfg b/setup.cfg
index 9464f8b8..c71bd235 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -34,7 +34,7 @@ install_requires =
     gelidum>=0.5.3
     natsort>=5.5.0
     matplotlib>=3.1.1,!=3.3.0,!=3.3.1,!=3.3.2
-    animatplot>=0.4.2
+    animatplot-ng>=0.4.2
     netcdf4>=1.4.0
     Pillow>=6.1.0
     importlib-metadata; python_version < "3.8"

From 4a1f3c767d0a9389d0bf8be0683fcbc42f84bcbd Mon Sep 17 00:00:00 2001
From: David Bold
Date: Mon, 24 Jul 2023 16:00:01 +0200
Subject: [PATCH 07/42] Remove codecov

---
 .github/workflows/master.yml        | 8 ++------
 .github/workflows/pythonpackage.yml | 8 ++------
 README.md                           | 3 ++-
 3 files changed, 6 insertions(+), 13 deletions(-)

diff --git a/.github/workflows/master.yml b/.github/workflows/master.yml
index e53792da..6f995f3b 100644
--- a/.github/workflows/master.yml
+++ b/.github/workflows/master.yml
@@ -22,7 +22,7 @@ jobs:
       matrix:
         python-version: [3.8, 3.9, '3.10', 3.x]
         pip-packages:
-          - "setuptools pip pytest pytest-cov coverage codecov boutdata xarray numpy>=1.16.0"
+          - "setuptools pip pytest pytest-cov coverage boutdata xarray numpy>=1.16.0"
       fail-fast: false
 
     steps:
@@ -41,8 +41,6 @@ jobs:
       run: |
         pip install pytest
        pytest -v --long --cov
-    - name: Upload coverage to Codecov
-      uses: codecov/codecov-action@v1
 
 
   pytest-min-versions:
@@ -53,7 +51,7 @@ jobs:
       matrix:
        python-version: [3.8]
        pip-packages:
-          - "setuptools pip pytest pytest-cov coverage codecov boutdata==0.1.4 xarray==0.18.0 dask==2.10.0 numpy==1.18.0 natsort==5.5.0 matplotlib==3.1.1 animatplot==0.4.2 netcdf4==1.4.2 Pillow==6.1.0" # test with oldest supported version of packages. Note, using numpy==1.18.0 as a workaround because numpy==1.17.0 is not supported on Python-3.7, even though we should currently support numpy==1.17.0.
+          - "setuptools pip pytest pytest-cov coverage boutdata==0.1.4 xarray==0.18.0 dask==2.10.0 numpy==1.18.0 natsort==5.5.0 matplotlib==3.1.1 animatplot==0.4.2 netcdf4==1.4.2 Pillow==6.1.0" # test with oldest supported version of packages. Note, using numpy==1.18.0 as a workaround because numpy==1.17.0 is not supported on Python-3.7, even though we should currently support numpy==1.17.0.
       fail-fast: false
 
     steps:
@@ -72,8 +70,6 @@ jobs:
       run: |
         pip install pytest
         pytest -v --long --cov
-    - name: Upload coverage to Codecov
-      uses: codecov/codecov-action@v1
 
 
 # Need to tidy up the things that flake8 finds before we activate this

diff --git a/.github/workflows/pythonpackage.yml b/.github/workflows/pythonpackage.yml
index 68ede8d9..63b72e5d 100644
--- a/.github/workflows/pythonpackage.yml
+++ b/.github/workflows/pythonpackage.yml
@@ -22,7 +22,7 @@ jobs:
       matrix:
         python-version: [3.8, 3.9, '3.10', 3.x]
         pip-packages:
-          - "setuptools pip pytest pytest-cov coverage codecov boutdata xarray numpy>=1.16.0"
+          - "setuptools pip pytest pytest-cov coverage boutdata xarray numpy>=1.16.0"
       fail-fast: false
 
     steps:
@@ -41,8 +41,6 @@ jobs:
      run: |
         pip install pytest
         pytest -v --long --cov
-    - name: Upload coverage to Codecov
-      uses: codecov/codecov-action@v1
 
 
   pytest-min-versions:
@@ -53,7 +51,7 @@ jobs:
       matrix:
         python-version: [3.8]
         pip-packages:
-          - "setuptools pip pytest pytest-cov coverage codecov boutdata==0.1.4 xarray==0.18.0 dask==2.10.0 numpy==1.18.0 natsort==5.5.0 matplotlib==3.1.1 animatplot==0.4.2 netcdf4==1.4.2 Pillow==7.2.0" # test with oldest supported version of packages. Note, using numpy==1.18.0 as a workaround because numpy==1.17.0 is not supported on Python-3.7, even though we should currently support numpy==1.17.0.
+          - "setuptools pip pytest pytest-cov coverage boutdata==0.1.4 xarray==0.18.0 dask==2.10.0 numpy==1.18.0 natsort==5.5.0 matplotlib==3.1.1 animatplot==0.4.2 netcdf4==1.4.2 Pillow==7.2.0" # test with oldest supported version of packages. Note, using numpy==1.18.0 as a workaround because numpy==1.17.0 is not supported on Python-3.7, even though we should currently support numpy==1.17.0.
       fail-fast: false
 
     steps:
@@ -72,8 +70,6 @@ jobs:
       run: |
         pip install pytest
         pytest -v --long --cov
-    - name: Upload coverage to Codecov
-      uses: codecov/codecov-action@v1
 
 
 # Need to tidy up the things that flake8 finds before we activate this

diff --git a/README.md b/README.md
index ec283403..a0ffa766 100644
--- a/README.md
+++ b/README.md
@@ -1,7 +1,8 @@
 # xBOUT
 
 [![Build Status](https://github.com/boutproject/xBOUT/workflows/master/badge.svg)](https://github.com/boutproject/xBOUT/actions)
-[![codecov](https://codecov.io/gh/boutproject/xBOUT/branch/master/graph/badge.svg)](https://codecov.io/gh/boutproject/xBOUT) [![Documentation Status](https://readthedocs.org/projects/xbout/badge/?version=latest)](https://xbout.readthedocs.io/en/latest/?badge=latest) [![DOI](https://zenodo.org/badge/160846663.svg)](https://zenodo.org/badge/latestdoi/160846663)
+[![Documentation Status](https://readthedocs.org/projects/xbout/badge/?version=latest)](https://xbout.readthedocs.io/en/latest/?badge=latest)
+[![DOI](https://zenodo.org/badge/160846663.svg)](https://zenodo.org/badge/latestdoi/160846663)
 
 Documentation: https://xbout.readthedocs.io

From dd75c68bf3dd72dc75403dcd3693c6d7cf4cff3d Mon Sep 17 00:00:00 2001
From: Peter Hill
Date: Wed, 26 Jul 2023 13:17:35 +0100
Subject: [PATCH 08/42] Switch completely to pyproject.toml

---
 pyproject.toml | 75 +++++++++++++++++++++++++++++++++++++++++++++++++-
 setup.cfg      | 62 -----------------------------------------
 setup.py       |  6 ----
 3 files changed, 74 insertions(+), 69 deletions(-)
 delete mode 100644 setup.cfg
 delete mode 100644 setup.py

diff --git a/pyproject.toml b/pyproject.toml
index 97869c5b..7fbe3ac9 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,79 @@
 [build-system]
-requires = ["setuptools >= 42", "wheel", "setuptools_scm[toml]>=3.4", "setuptools_scm_git_archive"]
+requires = [
+    "setuptools >= 65",
+    "setuptools_scm[toml] >= 6.2",
+    "setuptools_scm_git_archive",
+    "wheel >= 0.29.0",
+]
 build-backend = "setuptools.build_meta"
 
+[project]
+name = "xbout"
+description = "Collect data from BOUT++ runs in python using xarray"
+readme = "README.md"
+authors = [
+    {name = "Thomas Nicholas", email = "thomas.nicholas@york.ac.uk"},
+    {name = "John Omotani"},
+]
+license = {file = "LICENSE"}
+dynamic = ["version"]
+keywords = ["gyrokinetics", "analysis", "plasma", "research"]
+classifiers = [
+    "Development Status :: 3 - Alpha",
+    "Intended Audience :: Science/Research",
+    "Intended Audience :: Education",
+    "Intended Audience :: Developers",
+    "License :: OSI Approved :: Apache Software License",
+    "Natural Language :: English",
+    "Operating System :: POSIX :: Linux",
+    "Programming Language :: Python :: 3.8",
+    "Programming Language :: Python :: 3.9",
+    "Programming Language :: Python :: 3.10",
+    "Programming Language :: Python :: 3.11",
+    "Topic :: Scientific/Engineering :: Visualization",
+]
+requires-python = ">=3.8"
+dependencies = [
+    "xarray>=0.18.0,<2022.9.0",
+    "boutdata>=0.1.4",
+    "dask[array]>=2.10.0",
+    "gelidum>=0.5.3",
+    "natsort>=5.5.0",
+    "matplotlib>=3.1.1,!=3.3.0,!=3.3.1,!=3.3.2",
+    "animatplot-ng>=0.4.2",
+    "netcdf4>=1.4.0",
+    "Pillow>=6.1.0",
+]
+
+[project.optional-dependencies]
+calc = [
+    "numpy >= 1.13.0",
+    "scipy >= 1.3.0",
+    "dask >= 2.2.0",
+    "statsmodels >= 0.10.1",
+    "xrft",
+    "xhistogram",
+]
+docs = [
+    "sphinx >= 1.4",
+]
+3d_plot = [
+    "k3d >= 2.8.0",
+    "mayavi >= 4.7.2",
+    "wand",
+]
+tests = [
+    "pytest >= 3.3.0",
+    "pytest-cov",
+]
+
+[project.urls]
+Source = "https://github.com/boutproject/xBOUT"
+Tracker = "https://github.com/boutproject/xBOUT/issues"
+Documentation = "https://xbout.readthedocs.io/en/latest/"
+
 [tool.setuptools_scm]
 write_to = "xbout/_version.py"
+
+[tool.setuptools]
+packages = ["xbout"]

diff --git a/setup.cfg b/setup.cfg
deleted file mode 100644
index c71bd235..00000000
--- a/setup.cfg
+++ /dev/null
@@ -1,62 +0,0 @@
-[metadata]
-name = xbout
-url = https://github.com/boutproject/xBOUT
-author = Thomas Nicholas
-author_email = thomas.nicholas@york.ac.uk
-description = Collect data from BOUT++ runs in python using xarray
-license = Apache
-python_requires = >=3.8
-long_description = file: README.md
-long_description_content_type = text/markdown
-classifiers =
-    Development Status :: 3 - Alpha
-    Intended Audience :: Science/Research
-    Intended Audience :: Education
-    Intended Audience :: Developers
-    License :: OSI Approved :: Apache Software License
-    Natural Language :: English
-    Operating System :: POSIX :: Linux
-    Programming Language :: Python :: 3.8
-    Programming Language :: Python :: 3.9
-    Programming Language :: Python :: 3.10
-    Programming Language :: Python :: 3.11
-    Topic :: Scientific/Engineering :: Visualization
-
-[options]
-setup_requires =
-    setuptools>=42
-    setuptools_scm[toml]>=3.4
-    setuptools_scm_git_archive
-install_requires =
-    xarray>=0.18.0,<2022.9.0
-    boutdata>=0.1.4
-    dask[array]>=2.10.0
-    gelidum>=0.5.3
-    natsort>=5.5.0
-    matplotlib>=3.1.1,!=3.3.0,!=3.3.1,!=3.3.2
-    animatplot-ng>=0.4.2
-    netcdf4>=1.4.0
-    Pillow>=6.1.0
-    importlib-metadata; python_version < "3.8"
-include_package_data = True
-packages = find:
-
-[options.extras_require]
-calc =
-    numpy >= 1.13.0
-    scipy >= 1.3.0
-    dask >= 2.2.0
-    statsmodels >= 0.10.1
-    xrft
-    xhistogram
-docs = sphinx >= 1.4
-3d_plot =
-    k3d >= 2.8.0
-    mayavi >= 4.7.2
-    wand
-
-[build_sphinx]
-project = $metadata.name
-version = $metadata.version
-release = $metadata.version
-source-dir = docs

diff --git a/setup.py b/setup.py
deleted file mode 100644
index c7ba67b2..00000000
--- a/setup.py
+++ /dev/null
@@ -1,6 +0,0 @@
-#!/usr/bin/env python3
-
-from setuptools import setup
-
-if __name__ == "__main__":
-    setup(use_scm_version=True)

From 5af6af13e551790e2a322786ce7a5ce2b50e1fe3 Mon Sep 17 00:00:00 2001
From: Peter Hill
Date: Wed, 26 Jul 2023 13:33:19 +0100
Subject: [PATCH 09/42] Bump min numpy to 1.18

Minimum supported version for python 3.8
---
 pyproject.toml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pyproject.toml b/pyproject.toml
index 7fbe3ac9..128c96f4 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -47,7 +47,7 @@ dependencies = [
 
 [project.optional-dependencies]
 calc = [
-    "numpy >= 1.13.0",
+    "numpy >= 1.18.0",
     "scipy >= 1.3.0",
     "dask >= 2.2.0",
     "statsmodels >= 0.10.1",

From 4b4509be64042b81500ac79757fc091afb00a8be Mon Sep 17 00:00:00 2001
From: Peter Hill
Date: Wed, 26 Jul 2023 13:35:45 +0100
Subject: [PATCH 10/42] CI: Bump versions of actions in workflows

---
 .github/workflows/master.yml        |  8 ++++----
 .github/workflows/pythonpackage.yml |  8 ++++----
 .github/workflows/pythonpublish.yml | 12 ++++++------
 3 files changed, 14 insertions(+), 14 deletions(-)

diff --git a/.github/workflows/master.yml b/.github/workflows/master.yml
index 6f995f3b..8e7501f4 100644
--- a/.github/workflows/master.yml
+++ b/.github/workflows/master.yml
@@ -26,9 +26,9 @@ jobs:
       fail-fast: false
 
     steps:
-    - uses: actions/checkout@v2
+    - uses: actions/checkout@v3
     - name: Set up Python ${{ matrix.python-version }}
-      uses: actions/setup-python@v1
+      uses: actions/setup-python@v4
       with:
        python-version: ${{ matrix.python-version }}
     - name: Install dependencies
@@ -55,9 +55,9 @@ jobs:
       fail-fast: false
 
     steps:
-    - uses: actions/checkout@v2
+    - uses: actions/checkout@v3
     - name: Set up Python ${{ matrix.python-version }}
-      uses: actions/setup-python@v1
+      uses: actions/setup-python@v4
       with:
         python-version: ${{ matrix.python-version }}
     - name: Install dependencies

diff --git a/.github/workflows/pythonpackage.yml b/.github/workflows/pythonpackage.yml
index 63b72e5d..96c510f0 100644
--- a/.github/workflows/pythonpackage.yml
+++ b/.github/workflows/pythonpackage.yml
@@ -26,9 +26,9 @@ jobs:
       fail-fast: false
 
     steps:
-    - uses: actions/checkout@v2
+    - uses: actions/checkout@v3
     - name: Set up Python ${{ matrix.python-version }}
-      uses: actions/setup-python@v1
+      uses: actions/setup-python@v4
       with:
         python-version: ${{ matrix.python-version }}
     - name: Install dependencies
@@ -55,9 +55,9 @@ jobs:
       fail-fast: false
 
     steps:
-    - uses: actions/checkout@v2
+    - uses: actions/checkout@v3
     - name: Set up Python ${{ matrix.python-version }}
-      uses: actions/setup-python@v1
+      uses: actions/setup-python@v4
       with:
         python-version: ${{ matrix.python-version }}
     - name: Install dependencies

diff --git a/.github/workflows/pythonpublish.yml b/.github/workflows/pythonpublish.yml
index 07ce4bdd..5bcc8916 100644
--- a/.github/workflows/pythonpublish.yml
+++ b/.github/workflows/pythonpublish.yml
@@ -20,9 +20,9 @@ jobs:
       fail-fast: true
 
     steps:
-    - uses: actions/checkout@v2
+    - uses: actions/checkout@v3
     - name: Set up Python ${{ matrix.python-version }}
-      uses: actions/setup-python@v1
+      uses: actions/setup-python@v4
       with:
         python-version: ${{ matrix.python-version }}
     - name: Install dependencies
@@ -49,9 +49,9 @@ jobs:
       fail-fast: true
 
     steps:
-    - uses: actions/checkout@v2
+    - uses: actions/checkout@v3
     - name: Set up Python ${{ matrix.python-version }}
-      uses: actions/setup-python@v1
+      uses: actions/setup-python@v4
       with:
         python-version: ${{ matrix.python-version }}
     - name: Install dependencies
@@ -108,9 +108,9 @@ jobs:
   needs: [pytest, pytest-min-versions, black]
 
   steps:
-  - uses: actions/checkout@v2
+  - uses: actions/checkout@v3
   - name: Set up Python
-    uses: actions/setup-python@v1
+    uses: actions/setup-python@v4
     with:
       python-version: '3.x'
   - name: Install dependencies

From d717d8be87abe70e453c2fdf830451f027e94df5 Mon Sep 17 00:00:00 2001
From: Peter Hill
Date: Wed, 26 Jul 2023 13:37:23 +0100
Subject: [PATCH 11/42] CI: Rejig some of the CI

- install dependencies from package
- install package in separate step
---
 .github/workflows/master.yml        | 24 ++++++++++++------------
 .github/workflows/pythonpackage.yml | 25 ++++++++++++-------------
 2 files changed, 24 insertions(+), 25 deletions(-)

diff --git a/.github/workflows/master.yml b/.github/workflows/master.yml
index 8e7501f4..6f752708 100644
--- a/.github/workflows/master.yml
+++ b/.github/workflows/master.yml
@@ -20,9 +20,7 @@ jobs:
     if: always()
     strategy:
       matrix:
-        python-version: [3.8, 3.9, '3.10', 3.x]
-        pip-packages:
-          - "setuptools pip pytest pytest-cov coverage boutdata xarray numpy>=1.16.0"
+        python-version: ["3.8", "3.9", "3.10", "3.x"]
       fail-fast: false
 
     steps:
    - uses: actions/checkout@v3
     - name: Set up Python ${{ matrix.python-version }}
       uses: actions/setup-python@v4
       with:
         python-version: ${{ matrix.python-version }}
     - name: Install dependencies
       run: |
         sudo apt-get update && sudo apt-get install libhdf5-dev libnetcdf-dev
         python -m pip install --upgrade pip
-        pip install --upgrade ${{ matrix.pip-packages }}
-        pip install -e .
+    - name: Install package
+      run: |
+        pip install -e .[calc,tests]
     - name: Test with pytest
       run: |
-        pip install pytest
-        pytest -v --long --cov
+        pytest -vv --long --cov
+
 
+  # test with oldest supported version of packages
   pytest-min-versions:
 
     runs-on: ubuntu-latest
     if: always()
     strategy:
       matrix:
-        python-version: [3.8]
+        python-version: ["3.8"]
         pip-packages:
-          - "setuptools pip pytest pytest-cov coverage boutdata==0.1.4 xarray==0.18.0 dask==2.10.0 numpy==1.18.0 natsort==5.5.0 matplotlib==3.1.1 animatplot==0.4.2 netcdf4==1.4.2 Pillow==6.1.0"
+          - "boutdata==0.1.4 xarray==0.18.0 dask==2.10.0 numpy==1.18.0 natsort==5.5.0 matplotlib==3.1.1 animatplot-ng==0.4.2 netcdf4==1.4.2 Pillow==6.1.0"
       fail-fast: false
 
     steps:
     - uses: actions/checkout@v3
     - name: Set up Python ${{ matrix.python-version }}
       uses: actions/setup-python@v4
       with:
         python-version: ${{ matrix.python-version }}
     - name: Install dependencies
       run: |
         sudo apt-get update && sudo apt-get install libhdf5-dev libnetcdf-dev
         python -m pip install --upgrade pip
         pip install --upgrade ${{ matrix.pip-packages }}
-        pip install -e .
+    - name: Install package
+      run: |
+        pip install -e .[tests]
     - name: Test with pytest
       run: |
-        pip install pytest
-        pytest -v --long --cov
+        pytest -vv --long --cov
 
 
 # Need to tidy up the things that flake8 finds before we activate this

diff --git a/.github/workflows/pythonpackage.yml b/.github/workflows/pythonpackage.yml
index 96c510f0..38625b7e 100644
--- a/.github/workflows/pythonpackage.yml
+++ b/.github/workflows/pythonpackage.yml
@@ -20,9 +20,7 @@ jobs:
     if: always()
     strategy:
       matrix:
-        python-version: [3.8, 3.9, '3.10', 3.x]
-        pip-packages:
-          - "setuptools pip pytest pytest-cov coverage boutdata xarray numpy>=1.16.0"
+        python-version: ["3.8", "3.9", "3.10", "3.x"]
       fail-fast: false
 
     steps:
     - uses: actions/checkout@v3
     - name: Set up Python ${{ matrix.python-version }}
       uses: actions/setup-python@v4
       with:
         python-version: ${{ matrix.python-version }}
     - name: Install dependencies
       run: |
         sudo apt-get update && sudo apt-get install libhdf5-dev libnetcdf-dev
         python -m pip install --upgrade pip
-        pip install --upgrade ${{ matrix.pip-packages }}
-        pip install -e .
+    - name: Install package
+      run: |
+        pip install -e .[calc,tests]
     - name: Test with pytest
       run: |
-        pip install pytest
-        pytest -v --long --cov
+        pytest -vv --long --cov
+
 
+  # test with oldest supported version of packages
   pytest-min-versions:
 
     runs-on: ubuntu-latest
     if: always()
     strategy:
       matrix:
-        python-version: [3.8]
+        python-version: ["3.8"]
         pip-packages:
-          - "setuptools pip pytest pytest-cov coverage boutdata==0.1.4 xarray==0.18.0 dask==2.10.0 numpy==1.18.0 natsort==5.5.0 matplotlib==3.1.1 animatplot==0.4.2 netcdf4==1.4.2 Pillow==7.2.0"
+          - "boutdata==0.1.4 xarray==0.18.0 dask==2.10.0 numpy==1.18.0 natsort==5.5.0 matplotlib==3.1.1 animatplot-ng==0.4.2 netcdf4==1.4.2 Pillow==6.1.0"
       fail-fast: false
 
     steps:
     - uses: actions/checkout@v3
     - name: Set up Python ${{ matrix.python-version }}
       uses: actions/setup-python@v4
       with:
         python-version: ${{ matrix.python-version }}
     - name: Install dependencies
       run: |
         sudo apt-get update && sudo apt-get install libhdf5-dev libnetcdf-dev
         python -m pip install --upgrade pip
         pip install --upgrade ${{ matrix.pip-packages }}
-        pip install -e .
+    - name: Install package
+      run: |
+        pip install -e .[tests]
     - name: Test with pytest
       run: |
-        pip install pytest
-        pytest -v --long --cov
+        pytest -vv --long --cov
 
 # Need to tidy up the things that flake8 finds before we activate this
 #flake8:

From 493b2944465e183d9d3352974e66c3ad1a32db20 Mon Sep 17 00:00:00 2001
From: Peter Hill
Date: Wed, 26 Jul 2023 13:48:05 +0100
Subject: [PATCH 12/42] CI: Use trusted publisher publish workflow

- Delete tests from publish workflow, these should already have run!
- Use credential-less "trusted publisher" workflow, see:
  https://docs.pypi.org/trusted-publishers/
---
 .github/workflows/pythonpublish.yml | 119 +++-------------------------
 1 file changed, 10 insertions(+), 109 deletions(-)

diff --git a/.github/workflows/pythonpublish.yml b/.github/workflows/pythonpublish.yml
index 5bcc8916..3f2ebad4 100644
--- a/.github/workflows/pythonpublish.yml
+++ b/.github/workflows/pythonpublish.yml
@@ -5,108 +5,15 @@ name: Upload Python Package
 
 on:
   release:
-    types: [created]
+    types: [published]
 
 jobs:
-  pytest:
-
-    runs-on: ubuntu-latest
-    if: always()
-    strategy:
-      matrix:
-        python-version: [3.8, 3.9, '3.10', 3.x]
-        pip-packages:
-          - "setuptools pip pytest boutdata xarray numpy>=1.16.0"
-    fail-fast: true
-
-    steps:
-    - uses: actions/checkout@v3
-    - name: Set up Python ${{ matrix.python-version }}
-      uses: actions/setup-python@v4
-      with:
-        python-version: ${{ matrix.python-version }}
-    - name: Install dependencies
-      run: |
-        sudo apt-get update && sudo apt-get install libhdf5-dev libnetcdf-dev
-        python -m pip install --upgrade pip
-        pip install --upgrade ${{ matrix.pip-packages }}
-        pip install -e .
-    - name: Test with pytest
-      run: |
-        pip install pytest
-        pytest -v --long
-
-
-  pytest-min-versions:
-
-    runs-on: ubuntu-latest
-    if: always()
-    strategy:
-      matrix:
-        python-version: [3.8]
-        pip-packages:
-          - "setuptools pip pytest boutdata==0.1.4 xarray==0.18.0 dask==2.10.0 numpy==1.18.0 natsort==5.5.0 matplotlib==3.1.1 animatplot==0.4.2 netcdf4==1.4.2 Pillow==7.2.0" # test with oldest supported version of packages. Note, using numpy==1.18.0 as a workaround because numpy==1.17.0 is not supported on Python-3.7, even though we should currently support numpy==1.17.0.
-    fail-fast: true
-
-    steps:
-    - uses: actions/checkout@v3
-    - name: Set up Python ${{ matrix.python-version }}
-      uses: actions/setup-python@v4
-      with:
-        python-version: ${{ matrix.python-version }}
-    - name: Install dependencies
-      run: |
-        sudo apt-get update && sudo apt-get install libhdf5-dev libnetcdf-dev
-        python -m pip install --upgrade pip
-        pip install --upgrade ${{ matrix.pip-packages }}
-        pip install -e .
-    - name: Test with pytest
-      run: |
-        pip install pytest
-        pytest -v --long
-
-
-  # Need to tidy up the things that flake8 finds before we activate this
-  #flake8:
-
-  # runs-on: ubuntu-latest
-  # if: always()
-
-  # steps:
-  # - uses: actions/checkout@v2
-  # - name: Set up Python
-  #   uses: actions/setup-python@v1
-  # - name: Install dependencies
-  #   run: |
-  #     python -m pip install --upgrade pip
-  # - name: Lint with flake8
-  #   run: |
-  #     pip install flake8
-  #     flake8
-
-
-  black:
-
-    runs-on: ubuntu-latest
-    if: always()
-
-    steps:
-      - uses: actions/checkout@v2
-      - name: Set up Python
-        uses: actions/setup-python@v1
-      - name: Install dependencies
-        run: |
-          python -m pip install --upgrade pip
-      - name: Check formatting with black
-        run: |
-          pip install black
-          black --check .
-
-  deploy:
-
+  deploy:
+    name: Upload release to PyPI
     runs-on: ubuntu-latest
-    needs: [pytest, pytest-min-versions, black]
-
+    environment: release
+    permissions:
+      id-token: write
     steps:
     - uses: actions/checkout@v3
     - name: Set up Python
       uses: actions/setup-python@v4
       with:
         python-version: '3.x'
     - name: Install dependencies
       run: |
         sudo apt-get update && sudo apt-get install libhdf5-dev libnetcdf-dev
-        python -m pip install --upgrade pip
-        pip install --upgrade setuptools wheel twine
-        pip install -e .
-    - name: Build and publish
-      env:
-        TWINE_USERNAME: ${{ secrets.PYPI_USERNAME }}
-        TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }}
-      run: |
-        git fetch --tags --unshallow
-        python setup.py sdist bdist_wheel
-        twine upload dist/*
+        python -m pip install --upgrade pip build
+    - name: Build package
+      runs: python -m build --sdist --wheel
+    - name: Publish package distributions to PyPI
+      uses: pypa/gh-action-pypi-publish@release/v1

From d1cd1db4233e0065243dd794040a120491fb9a6d Mon Sep 17 00:00:00 2001
From: Peter Hill
Date: Wed, 26 Jul 2023 13:54:38 +0100
Subject: [PATCH 13/42] Apply black formatting

---
 xbout/plotting/utils.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/xbout/plotting/utils.py b/xbout/plotting/utils.py
index a99ef257..2e443e0a 100644
--- a/xbout/plotting/utils.py
+++ b/xbout/plotting/utils.py
@@ -253,5 +253,5 @@ def _k3d_plot_isel(da_region, isel, vmin, vmax, **kwargs):
         indices,
         attribute=data,
         color_range=[vmin, vmax],
-        **kwargs
+        **kwargs,
     )

From d296804aaaa470c71b82c7fd5331fb9f1ba053ac Mon Sep 17 00:00:00 2001
From: Peter Hill
Date: Wed, 26 Jul 2023 13:55:31 +0100
Subject: [PATCH 14/42] CI: Auto-apply black formatting

---
 .github/workflows/black.yml         | 37 +++++++++++++++++++++++++++++
 .github/workflows/master.yml        | 18 --------------
 .github/workflows/pythonpackage.yml | 18 --------------
 3 files changed, 37 insertions(+), 36 deletions(-)
 create mode 100644 .github/workflows/black.yml

diff --git a/.github/workflows/black.yml b/.github/workflows/black.yml
new file mode 100644
index 00000000..fd267d5f
--- /dev/null
+++ b/.github/workflows/black.yml
@@ -0,0 +1,37 @@
+name: black
+
+on:
+  pull_request:
+    paths:
+      - '**.py'
+
+defaults:
+  run:
+    shell: bash
+
+jobs:
+  black:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v3
+        with:
+          ref: ${{ github.head_ref }}
+      - name: Setup Python
+        uses: actions/setup-python@v4
+        with:
+          python-version: 3.x
+      - name: Install black
+        run: |
+          python -m pip install --upgrade pip
+          pip install black
+      - name: Version
+        run: |
+          python --version
+          black --version
+      - name: Run black
+        run: |
+          black xbout
+          black docs
+      - uses: stefanzweifel/git-auto-commit-action@v4
+        with:
+          commit_message: "Apply black formatting"

diff --git a/.github/workflows/master.yml b/.github/workflows/master.yml
index 6f752708..80a01b53 100644
--- a/.github/workflows/master.yml
+++ b/.github/workflows/master.yml
@@ -89,21 +89,3 @@ jobs:
 #      run: |
 #        pip install flake8
 #        flake8
-
-
-  black:
-
-    runs-on: ubuntu-latest
-    if: always()
-
-    steps:
-      - uses: actions/checkout@v2
-      - name: Set up Python
-        uses: actions/setup-python@v1
-      - name: Install dependencies
-        run: |
-          python -m pip install --upgrade pip
-      - name: Check formatting with black
-        run: |
-          pip install black
-          black --check .
diff --git a/.github/workflows/pythonpackage.yml b/.github/workflows/pythonpackage.yml
index 38625b7e..7708b822 100644
--- a/.github/workflows/pythonpackage.yml
+++ b/.github/workflows/pythonpackage.yml
@@ -88,21 +88,3 @@ jobs:
 #      run: |
 #        pip install flake8
 #        flake8
-
-
-  black:
-
-    runs-on: ubuntu-latest
-    if: always()
-
-    steps:
-      - uses: actions/checkout@v2
-      - name: Set up Python
-        uses: actions/setup-python@v1
-      - name: Install dependencies
-        run: |
-          python -m pip install --upgrade pip
-      - name: Check formatting with black
-        run: |
-          pip install black
-          black --check .

From 60587de5480f3ee9b6f81ca343e4161a619341a2 Mon Sep 17 00:00:00 2001
From: Peter Hill
Date: Wed, 26 Jul 2023 14:37:33 +0100
Subject: [PATCH 15/42] Don't use `animatplot-ng` in `pytest-min-version`

Also explicitly use py 3.11 in tests

Co-authored-by: David Bold
---
 .github/workflows/master.yml        | 4 ++--
 .github/workflows/pythonpackage.yml | 2 +-
 2 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/.github/workflows/master.yml b/.github/workflows/master.yml
index 80a01b53..345c3ab9 100644
--- a/.github/workflows/master.yml
+++ b/.github/workflows/master.yml
@@ -20,7 +20,7 @@ jobs:
     if: always()
     strategy:
       matrix:
-        python-version: ["3.8", "3.9", "3.10", "3.x"]
+        python-version: ["3.8", "3.9", "3.10", "3.11", "3.x"]
       fail-fast: false
 
     steps:
@@ -50,7 +50,7 @@ jobs:
       matrix:
         python-version: ["3.8"]
         pip-packages:
-          - "boutdata==0.1.4 xarray==0.18.0 dask==2.10.0 numpy==1.18.0 natsort==5.5.0 matplotlib==3.1.1 animatplot-ng==0.4.2 netcdf4==1.4.2 Pillow==6.1.0"
+          - "boutdata==0.1.4 xarray==0.18.0 dask==2.10.0 numpy==1.18.0 natsort==5.5.0 matplotlib==3.1.1 animatplot==0.4.2 netcdf4==1.4.2 Pillow==6.1.0"
       fail-fast: false
 
     steps:

diff --git a/.github/workflows/pythonpackage.yml b/.github/workflows/pythonpackage.yml
index 7708b822..524a3a2a 100644
--- a/.github/workflows/pythonpackage.yml
+++ b/.github/workflows/pythonpackage.yml
@@ -50,7 +50,7 @@ jobs:
       matrix:
         python-version: ["3.8"]
         pip-packages:
-          - "boutdata==0.1.4 xarray==0.18.0 dask==2.10.0 numpy==1.18.0 natsort==5.5.0 matplotlib==3.1.1 animatplot-ng==0.4.2 netcdf4==1.4.2 Pillow==6.1.0"
+          - "boutdata==0.1.4 xarray==0.18.0 dask==2.10.0 numpy==1.18.0 natsort==5.5.0 matplotlib==3.1.1 animatplot==0.4.2 netcdf4==1.4.2 Pillow==6.1.0"
       fail-fast: false
 
     steps:

From 42116fadc3c9afc08874f89b5d1a834d4c34b79d Mon Sep 17 00:00:00 2001
From: Peter Hill
Date: Wed, 26 Jul 2023 15:10:11 +0100
Subject: [PATCH 16/42] CI: Install numpy before netcdf4 for pytest-min-version

This version of netcdf4 doesn't have a binary wheel for py3.8, so gets
built from source and needs numpy at build-time. Later versions do
specify numpy as a build requirement.
---
 .github/workflows/master.yml        | 6 +++---
 .github/workflows/pythonpackage.yml | 6 +++---
 2 files changed, 6 insertions(+), 6 deletions(-)

diff --git a/.github/workflows/master.yml b/.github/workflows/master.yml
index 345c3ab9..28a89fac 100644
--- a/.github/workflows/master.yml
+++ b/.github/workflows/master.yml
@@ -49,8 +49,6 @@ jobs:
     strategy:
       matrix:
         python-version: ["3.8"]
-        pip-packages:
-          - "boutdata==0.1.4 xarray==0.18.0 dask==2.10.0 numpy==1.18.0 natsort==5.5.0 matplotlib==3.1.1 animatplot==0.4.2 netcdf4==1.4.2 Pillow==6.1.0"
       fail-fast: false
 
     steps:
@@ -61,7 +59,9 @@ jobs:
       run: |
         sudo apt-get update && sudo apt-get install libhdf5-dev libnetcdf-dev
         python -m pip install --upgrade pip
-        pip install --upgrade ${{ matrix.pip-packages }}
+        # Install numpy first because this version of netcdf4 will install from source and needs it at build-time
+        pip install --upgrade numpy==1.18.0
+        pip install --upgrade boutdata==0.1.4 xarray==0.18.0 dask==2.10.0 natsort==5.5.0 matplotlib==3.1.1 animatplot==0.4.2 netcdf4==1.5.0 Pillow==6.1.0
     - name: Install package
       run: |
         pip install -e .[tests]

diff --git a/.github/workflows/pythonpackage.yml b/.github/workflows/pythonpackage.yml
index 524a3a2a..2c314b88 100644
--- a/.github/workflows/pythonpackage.yml
+++ b/.github/workflows/pythonpackage.yml
@@ -49,8 +49,6 @@ jobs:
     strategy:
       matrix:
         python-version: ["3.8"]
-        pip-packages:
-          - "boutdata==0.1.4 xarray==0.18.0 dask==2.10.0 numpy==1.18.0 natsort==5.5.0 matplotlib==3.1.1 animatplot==0.4.2 netcdf4==1.4.2 Pillow==6.1.0"
       fail-fast: false
 
     steps:
@@ -61,7 +59,9 @@ jobs:
       run: |
         sudo apt-get update && sudo apt-get install libhdf5-dev libnetcdf-dev
         python -m pip install --upgrade pip
-        pip install --upgrade ${{ matrix.pip-packages }}
+        # Install numpy first because this version of netcdf4 will install from source and needs it at build-time
+        pip install --upgrade numpy==1.18.0
+        pip install --upgrade boutdata==0.1.4 xarray==0.18.0 dask==2.10.0 natsort==5.5.0 matplotlib==3.1.1 animatplot==0.4.2 netcdf4==1.5.0 Pillow==6.1.0
     - name: Install package
       run: |
         pip install -e .[tests]

From 45e01e0f7985961209eb7f1750bb55508305128d Mon Sep 17 00:00:00 2001
From: Peter Hill
Date: Wed, 26 Jul 2023 15:58:59 +0100
Subject: [PATCH 17/42] CI: Only install oldest xarray in min version test

---
 .github/workflows/master.yml        | 6 ++----
 .github/workflows/pythonpackage.yml | 6 ++----
 2 files changed, 4 insertions(+), 8 deletions(-)

diff --git a/.github/workflows/master.yml b/.github/workflows/master.yml
index 28a89fac..c23fe9ca 100644
--- a/.github/workflows/master.yml
+++ b/.github/workflows/master.yml
@@ -42,7 +42,7 @@ jobs:
 
 
 # test with oldest supported version of packages
-  pytest-min-versions:
+  pytest-oldest-xarray:
 
     runs-on: ubuntu-latest
     if: always()
     strategy:
       matrix:
         python-version: ["3.8"]
@@ -61,9 +61,7 @@ jobs:
       run: |
         sudo apt-get update && sudo apt-get install libhdf5-dev libnetcdf-dev
         python -m pip install --upgrade pip
-        # Install numpy first because this version of netcdf4 will install from source and needs it at build-time
-        pip install --upgrade numpy==1.18.0
-        pip install --upgrade boutdata==0.1.4 xarray==0.18.0 dask==2.10.0 natsort==5.5.0 matplotlib==3.1.1 animatplot==0.4.2 netcdf4==1.5.0 Pillow==6.1.0
+        pip install xarray==0.18.0
     - name: Install package
       run: |
         pip install -e .[tests]

diff --git a/.github/workflows/pythonpackage.yml b/.github/workflows/pythonpackage.yml
index 2c314b88..de3cfaf5 100644
--- a/.github/workflows/pythonpackage.yml
+++ b/.github/workflows/pythonpackage.yml
@@ -42,7 +42,7 @@ jobs:
 
 
 # test with oldest supported version of packages
-  pytest-min-versions:
+  pytest-oldest-xarray:
 
     runs-on: ubuntu-latest
     if: always()
     strategy:
       matrix:
         python-version: ["3.8"]
@@ -61,9 +61,7 @@ jobs:
       run: |
         sudo apt-get update && sudo apt-get install libhdf5-dev libnetcdf-dev
         python -m pip install --upgrade pip
-        # Install numpy first because this version of netcdf4 will install from source and needs it at build-time
-        pip install --upgrade numpy==1.18.0
-        pip install --upgrade boutdata==0.1.4 xarray==0.18.0 dask==2.10.0 natsort==5.5.0 matplotlib==3.1.1 animatplot==0.4.2 netcdf4==1.5.0 Pillow==6.1.0
+        pip install xarray==0.18.0
     - name: Install package
       run: |
         pip install -e .[tests]

From ea879a09f5ca8d487d1fa4a17f8360f0dbcc4645 Mon Sep 17 00:00:00 2001
From: Peter Hill
Date: Wed, 26 Jul 2023 17:36:34 +0100
Subject: [PATCH 18/42] CI: Force compatible version of pandas for min-version
 xarray

---
 .github/workflows/master.yml        | 2 +-
 .github/workflows/pythonpackage.yml | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/.github/workflows/master.yml b/.github/workflows/master.yml
index c23fe9ca..ad1fde43 100644
--- a/.github/workflows/master.yml
+++ b/.github/workflows/master.yml
@@ -61,7 +61,7 @@ jobs:
       run: |
         sudo apt-get update && sudo apt-get install libhdf5-dev libnetcdf-dev
         python -m pip install --upgrade pip
-        pip install xarray==0.18.0
+        pip install xarray~=0.18.0 pandas~=1.4.0
     - name: Install package
       run: |
         pip install -e .[tests]

diff --git a/.github/workflows/pythonpackage.yml b/.github/workflows/pythonpackage.yml
index de3cfaf5..2dfd84f4 100644
--- a/.github/workflows/pythonpackage.yml
+++ b/.github/workflows/pythonpackage.yml
@@ -61,7 +61,7 @@ jobs:
       run: |
         sudo apt-get update && sudo apt-get install libhdf5-dev libnetcdf-dev
         python -m pip install --upgrade pip
-        pip install xarray==0.18.0
+        pip install xarray~=0.18.0 pandas~=1.4.0
     - name: Install package
       run: |
         pip install -e .[tests]

From 515c7f852142dd2b5f48dff01c1a896ee365fce5 Mon Sep 17 00:00:00 2001
From: Peter Hill
Date: Wed, 26 Jul 2023 09:46:33 +0100
Subject: [PATCH 19/42] Docs: Update RTD config to v2

---
 .readthedocs.yml | 30 +++++++++++++++++++++++++-----
 1 file changed, 25 insertions(+), 5 deletions(-)

diff --git a/.readthedocs.yml b/.readthedocs.yml
index f2f8fd70..a8743fec 100644
--- a/.readthedocs.yml
+++ b/.readthedocs.yml
@@ -1,8 +1,28 @@
+# Read the Docs configuration file for Sphinx projects
+# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
+
+# Required
+version: 2
+
+# Set the OS, Python version and other tools you might need
 build:
-  image: latest
+  os: ubuntu-22.04
+  tools:
+    python: "3.11"
+
+# Build documentation in the "docs/" directory with Sphinx
+sphinx:
+  configuration: docs/conf.py
+
+# Optionally build your docs in additional formats such as PDF and ePub
+# formats:
+#    - pdf
+#    - epub
+
+# See https://docs.readthedocs.io/en/stable/guides/reproducible-builds.html
 python:
-  version: 3.8
-  pip_install: true
-  extra_requirements:
-    - docs
+  install:
+    - method: pip
+      path: .
+      extra_requirements:
+        - docs

From 18d77319466cb61d6307fb2f75ec7818c77ec41e Mon Sep 17 00:00:00 2001
From: Peter Hill
Date: Wed, 26 Jul 2023 09:51:12 +0100
Subject: [PATCH 20/42] Docs: Fix xarray accessor link

Fixes #287
---
 docs/accessor_methods.rst | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/accessor_methods.rst b/docs/accessor_methods.rst
index 6191c4f8..3d96426d 100644
--- a/docs/accessor_methods.rst
+++ b/docs/accessor_methods.rst
@@ -17,4 +17,4 @@ or::
 
     ds.bout.create_restarts(savepath='.', nxpe=4, nype=4)
 
-.. _accessor: https://xarray.pydata.org/en/stable/internals.html#extending-xarray
+.. _accessor: https://docs.xarray.dev/en/stable/internals/extending-xarray.html

From 6ead3ffa1b7ac6c5ae405d2f8c68c730351f17a5 Mon Sep 17 00:00:00 2001
From: Peter Hill
Date: Wed, 26 Jul 2023 09:52:54 +0100
Subject: [PATCH 21/42] Docs: Fix missing `language` and wrong static dir in
 sphinx config

---
 docs/conf.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/docs/conf.py b/docs/conf.py
index 22fdd208..377ea403 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -68,7 +68,7 @@
 #
 # This is also used if you do content translation via gettext catalogs.
 # Usually you set "language" from the command line for these cases.
-language = None
+language = "en"
 
 # List of patterns, relative to source directory, that match files and
 # directories to ignore when looking for source files.
@@ -99,7 +99,7 @@
 # Add any paths that contain custom static files (such as style sheets) here,
 # relative to this directory. They are copied after the builtin static files,
 # so a file named "default.css" will overwrite the builtin "default.css".
-html_static_path = ["_static"]
+html_static_path = ["images"]
 
 # Custom sidebar templates, must be a dictionary that maps document names
 # to template names.

From 7d89bda14dd58402f7774dcce086604019656371 Mon Sep 17 00:00:00 2001
From: Peter Hill
Date: Wed, 26 Jul 2023 10:45:56 +0100
Subject: [PATCH 22/42] Docs: Fix lots of sphinx warnings

---
 docs/accessor_methods.rst |  2 +-
 docs/conf.py              |  8 ++++++++
 docs/extending_xbout.rst  |  6 +++---
 docs/index.rst            |  6 +++---
 docs/loading_data.rst     |  2 +-
 docs/xbout.rst            |  8 ++++++++
 xbout/boutdataarray.py    | 15 +++++++++------
 xbout/boutdataset.py      | 36 +++++++++++++++++++-----------------
 xbout/load.py             | 33 +++++++++++++++++----------------
 xbout/plotting/animate.py |  3 +++
 10 files changed, 72 insertions(+), 47 deletions(-)

diff --git a/docs/accessor_methods.rst b/docs/accessor_methods.rst
index 3d96426d..5d664b42 100644
--- a/docs/accessor_methods.rst
+++ b/docs/accessor_methods.rst
@@ -2,7 +2,7 @@ BoutDataset Accessor Methods
 ============================
 
 xBOUT defines a set of accessor_ methods on the loaded Datasets and
-DataArrays, which are called by `ds.bout.method`.
+DataArrays, which are called by ``ds.bout.method``.
 
 This is where BOUT-specific data manipulation, analysis and plotting
 functionality is stored, for example::

diff --git a/docs/conf.py b/docs/conf.py
index 377ea403..bf2e6988 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -49,6 +49,7 @@
     "sphinx.ext.mathjax",
     "sphinx.ext.viewcode",
     "sphinx.ext.napoleon",
+    "sphinx.ext.intersphinx",
 ]
 
 # Add any paths that contain templates here, relative to this directory.
@@ -169,3 +170,10 @@
 
 
 # -- Extension configuration -------------------------------------------------
+
+intersphinx_mapping = {
+    "python": ("https://docs.python.org/3", None),
+    "numpy": ("https://numpy.org/doc/stable", None),
+    "scipy": ("https://docs.scipy.org/doc/scipy", None),
+    "xarray": ("https://docs.xarray.dev/en/latest", None),
+    }

diff --git a/docs/extending_xbout.rst b/docs/extending_xbout.rst
index d158dbc1..81059129 100644
--- a/docs/extending_xbout.rst
+++ b/docs/extending_xbout.rst
@@ -3,12 +3,12 @@ Extending xBOUT for your BOUT module
 
 The accessor classes `BoutDatasetAccessor` and `BoutDataArrayAccessor`
 are intended to be subclassed for specific BOUT++ modules. The
-subclass accessor will then inherit all the `.bout` accessor methods,
+subclass accessor will then inherit all the ``.bout`` accessor methods,
 but you will also be able to override these and define your own
 methods within your new accessor.
 
-For example to add an extra method specific to the `STORM` BOUT++
+For example to add an extra method specific to the ``STORM`` BOUT++
 module::
 
     from xarray import register_dataset_accessor
@@ -26,7 +26,7 @@ module::
     >>> ds.storm.special_method()
     Do something only STORM users would want to do
 
-There is included an example of a `StormDataset` which contains all
+There is included an example of a ``StormDataset`` which contains all
 the data from a STORM_ simulation, as well as extra calculated
 quantities which are specific to the STORM module.

diff --git a/docs/index.rst b/docs/index.rst
index f96b3e7d..3eebf689 100644
--- a/docs/index.rst
+++ b/docs/index.rst
@@ -27,19 +27,19 @@ suggestions.
 Installation
 ------------
 
-With `pip`:
+With ``pip``:
 
 .. code-block:: bash
 
   pip install --user xbout
 
-or `conda`:
+or ``conda``:
 
 .. code-block:: bash
 
  conda install -c conda-forge xbout
 
-You can run the tests by running `pytest --pyargs xbout`.
+You can run the tests by running ``pytest --pyargs xbout``.
 
 xBOUT will install the required python packages when you run one of
 the above install commands if they are not already installed on your

diff --git a/docs/loading_data.rst b/docs/loading_data.rst
index 07e8f99b..f20eff3d 100644
--- a/docs/loading_data.rst
+++ b/docs/loading_data.rst
@@ -10,6 +10,6 @@ The data from a BOUT++ run can be loaded with just::
   bd = open_boutdataset('./run_dir*/BOUT.dmp.*.nc', inputfilepath='./BOUT.inp')
 
 `open_boutdataset` returns an instance of an `xarray.Dataset` which
-contains BOUT-specific information in the `attrs`, so represents a
+contains BOUT-specific information in the ``attrs``, so represents a
 general structure for storing all of the output of a simulation,
 including data, input options and (soon) grid data.

diff --git a/docs/xbout.rst b/docs/xbout.rst
index 8bc4f536..989aa6f9 100644
--- a/docs/xbout.rst
+++ b/docs/xbout.rst
@@ -17,6 +17,14 @@ xbout.boutdataset module
    :undoc-members:
    :show-inheritance:
 
+xbout.geometries module
+-----------------------
+
+.. automodule:: xbout.geometries
+   :members:
+   :undoc-members:
+   :show-inheritance:
+
 xbout.load module
 -----------------

diff --git a/xbout/boutdataarray.py b/xbout/boutdataarray.py
index 7e57cf1e..0b293bb8 100644
--- a/xbout/boutdataarray.py
+++ b/xbout/boutdataarray.py
@@ -29,7 +29,7 @@ class BoutDataArrayAccessor:
     selecting a variable from a BOUT++ dataset.
 
     These BOUT-specific methods and attributes are accessed via the bout
-    accessor, e.g. `da.bout.options` returns a `BoutOptionsFile` instance.
+    accessor, e.g. ``da.bout.options`` returns a `BoutOptionsFile` instance.
     """
 
     def __init__(self, da):
@@ -698,6 +698,7 @@ class BoutDataArrayAccessor:
             If set to false, do not create the animation, just return the block or
             blocks
         axis_coords : None, str, dict
             Coordinates to use for axis labelling.
+
             - None: Use the dimension coordinate for each axis, if it exists.
             - "index": Use the integer index values.
             - dict: keys are dimension names, values set axis_coords for each axis
@@ -705,6 +706,7 @@ class BoutDataArrayAccessor:
             coordinate (which must have the dimension given by 'key'), or a 1d
             numpy array, dask array or DataArray whose length matches the length of
             the dimension given by 'key'.
+
             Only affects time coordinate for plots with poloidal_plot=True.
         fps : int, optional
             Frames per second of resulting gif
@@ -815,6 +817,7 @@ class BoutDataArrayAccessor:
             Dimension over which to animate, defaults to the time dimension
         axis_coords : None, str, dict
             Coordinates to use for axis labelling.
+
             - None: Use the dimension coordinate for each axis, if it exists.
             - "index": Use the integer index values.
             - dict: keys are dimension names, values set axis_coords for each axis
@@ -834,7 +837,7 @@ class BoutDataArrayAccessor:
             A matplotlib axes instance to plot to. If None, create a new
             figure and axes, and plot to that
         aspect : str or None, optional
-            Argument to set_aspect(), defaults to "auto"
+            Argument to ``ax.set_aspect()``, defaults to "auto"
         kwargs : dict, optional
             Additional keyword arguments are passed on to the plotting function
             (animatplot.blocks.Line).
@@ -842,7 +845,7 @@ class BoutDataArrayAccessor:
         Returns
         -------
         animation or block
-            If animate==True, returns an animatplot.Animation object, otherwise
+            If ``animate==True``, returns an animatplot.Animation object, otherwise
             returns an animatplot.blocks.Line instance.
         """
 
@@ -1025,7 +1028,7 @@ class BoutDataArrayAccessor:
     def interpolate_to_cartesian(self, *args, **kwargs):
 
         This method is intended to be used to produce data for visualisation, which
         normally does not require double-precision values, so by default the data is
-        converted to `np.float32`. Pass `use_float32=False` to retain the original
+        converted to `numpy.float32`. Pass ``use_float32=False`` to retain the original
         precision.
 
         Parameters
@@ -1037,10 +1040,10 @@ class BoutDataArrayAccessor:
         nZ : int (default 100)
             Number of grid points in the Z direction
         use_float32 : bool (default True)
-            Downgrade precision to `np.float32`?
+            Downgrade precision to `numpy.float32`?
         fill_value : float (default np.nan)
             Value to use for points outside the interpolation domain (passed to
-            `scipy.RegularGridInterpolator`)
+            `scipy.interpolate.RegularGridInterpolator`)
 
         See Also
         --------

diff --git a/xbout/boutdataset.py b/xbout/boutdataset.py
index 6540fe78..c7fe5e69 100644
--- a/xbout/boutdataset.py
+++ b/xbout/boutdataset.py
@@ -41,7 +41,7 @@ class BoutDatasetAccessor:
     `open_boutdataset()`.
 
     These BOUT-specific methods and attributes are accessed via the bout
-    accessor, e.g. `ds.bout.options` returns a `BoutOptionsFile` instance.
+    accessor, e.g. ``ds.bout.options`` returns a `BoutOptionsFile` instance.
     """
 
     def __init__(self, ds):
@@ -277,18 +277,18 @@ class BoutDatasetAccessor:
    def integrate_midpoints(self, variable, *, dims=None, cumulative_t=False):

         The quantity being integrated is assumed to be a scalar variable.
 
         When doing a 1d integral in the 'y' dimension, the integral is calculated as a
-        poloidal integral if the variable is on the standard grid ('direction_y'
+        poloidal integral if the variable is on the standard grid (``direction_y``
         attribute is "Standard"), or as a parallel-to-B integral if the variable is on
-        the field-aligned grid ('direction_y' attribute is "Aligned").
+        the field-aligned grid (``direction_y`` attribute is "Aligned").
 
         When doing a 2d integral over 'x' and 'y' dimensions, the integral will be over
-        poloidal cross-sections if the variable is not field-aligned (direction_y ==
-        "Standard") and over field-aligned surfaces if the variable is field-aligned
-        (direction_y == "Aligned"). The latter seems unlikely to be useful as the
+        poloidal cross-sections if the variable is not field-aligned (``direction_y ==
+        "Standard"``) and over field-aligned surfaces if the variable is field-aligned
+        (``direction_y == "Aligned"``). The latter seems unlikely to be useful as the
         surfaces depend on the arbitrary origin used for zShift.
 
         Is a method of BoutDataset accessor rather than of BoutDataArray so we can use
-        other variables like `J`, `g11`, `g_22` for the integration.
+        other variables like :math:`J`, :math:`g^{11}`, :math:`g_{22}` for the integration.
 
         Note the xarray.DataArray.integrate() method uses the trapezium rule, which is
         not consistent with the way BOUT++ defines grid spacings as cell widths. Also,
@@ -557,7 +557,7 @@ class BoutDatasetAccessor:
    def interpolate_to_cartesian(

         This method is intended to be used to produce data for visualisation, which
         normally does not require double-precision values, so by default the data is
-        converted to `np.float32`. Pass `use_float32=False` to retain the original
+        converted to `numpy.float32`. Pass ``use_float32=False`` to retain the original
         precision.
 
         Parameters
@@ -569,10 +569,10 @@ class BoutDatasetAccessor:
         nZ : int (default 100)
             Number of grid points in the Z direction
         use_float32 : bool (default True)
-            Downgrade precision to `np.float32`?
+            Downgrade precision to `numpy.float32`?
         fill_value : float (default np.nan)
             Value to use for points outside the interpolation domain (passed to
-            `scipy.RegularGridInterpolator`)
+            `scipy.interpolate.RegularGridInterpolator`)
 
         See Also
         --------
@@ -813,10 +813,10 @@ class BoutDatasetAccessor:
    def save(

         Examples
         --------
-        If `separate_vars=True`, then multiple files will be created. These can
-        all be opened and merged in one go using a call of the form:
+        If ``separate_vars=True``, then multiple files will be created. These can
+        all be opened and merged in one go using a call of the form::
 
-        ds = xr.open_mfdataset('boutdata_*.nc', combine='nested', concat_dim=None)
+            ds = xr.open_mfdataset('boutdata_*.nc', combine='nested', concat_dim=None)
         """
 
         if variables is None:
@@ -976,10 +976,10 @@ class BoutDatasetAccessor:
    def to_restart(

         tind : int, default -1
             Time-index of the slice to write to the restart files. Note, when creating
             restart files from 'dump' files it is recommended to open the Dataset using
-            the full time range and use the `tind` argument here, rather than selecting
-            a time point manually, so that the calculation of `hist_hi` in the output
-            can be correct (which requires knowing the existing value of `hist_hi`
-            (output step count at the end of the simulation), `tind` and the total
+            the full time range and use the ``tind`` argument here, rather than selecting
+            a time point manually, so that the calculation of ``hist_hi`` in the output
+            can be correct (which requires knowing the existing value of ``hist_hi``
+            (output step count at the end of the simulation), ``tind`` and the total
             number of time points in the current output data).
         prefix : str, default "BOUT.restart"
             Prefix to use for names of restart files
@@ -1061,6 +1061,7 @@ class BoutDatasetAccessor:
    def animate_list(

             grid coordinates, per variable if sequence is given
         axis_coords : None, str, dict or list of None, str or dict
             Coordinates to use for axis labelling.
+
             - None: Use the dimension coordinate for each axis, if it exists.
             - "index": Use the integer index values.
             - dict: keys are dimension names, values set axis_coords for each axis
@@ -1068,6 +1069,7 @@ class BoutDatasetAccessor:
             coordinate (which must have the dimension given by 'key'), or a 1d
             numpy array, dask array or DataArray whose length matches the length of
             the dimension given by 'key'.
+
             Only affects time coordinate for plots with poloidal_plot=True.
             If a list is passed, it must have the same length as 'variables' and gives
             the axis_coords setting for each plot individually.
diff --git a/xbout/load.py b/xbout/load.py
index 66908434..26ab8c3d 100644
--- a/xbout/load.py
+++ b/xbout/load.py
@@ -91,9 +91,9 @@ def open_boutdataset(
     file.
 
     Can also load from a grid file or from restart files.
 
     Note that when reloading a Dataset that was saved by xBOUT, the state of the saved
-    Dataset is restored, and the values of `keep_xboundaries`, `keep_yboundaries`, and
-    `run_name` are ignored. `geometry` is treated specially, and can be passed when
-    reloading a Dataset (along with `gridfilepath` if needed).
+    Dataset is restored, and the values of ``keep_xboundaries``, ``keep_yboundaries``, and
+    ``run_name`` are ignored. ``geometry`` is treated specially, and can be passed when
+    reloading a Dataset (along with ``gridfilepath`` if needed).
 
     Troubleshooting
     ---------------
@@ -101,14 +101,14 @@ def open_boutdataset(
     some variables may have conflicts (e.g. a source term was changed between some of
     the restarts, but the source term is saved as time-independent, without a
     t-dimension). In this case one workaround is to pass a list of variable names to the
-    keyword argument `drop_vars` to ignore the variables with conflicts, e.g. if `"S1"`
-    and `"S2"` have conflicts
-    ```
-    ds = open_boutdataset("data*/boutdata.nc", drop_variables=["S1", "S2"])
-    ```
-    will open a Dataset which is missing `"S1"` and `"S2"`.\
-    [`drop_variables` is an argument of `xarray.open_dataset()` that is passed down
-    through `kwargs`.]
+    keyword argument ``drop_vars`` to ignore the variables with conflicts, e.g. if ``"S1"``
+    and ``"S2"`` have conflicts::
+
+        ds = open_boutdataset("data*/boutdata.nc", drop_variables=["S1", "S2"])
+
+    will open a Dataset which is missing ``"S1"`` and ``"S2"``
+    (``drop_variables`` is an argument of `xarray.open_dataset` that is passed down
+    through ``kwargs``.)
 
     Parameters
     ----------
@@ -131,11 +131,12 @@ def open_boutdataset(
 
         If not specified then will attempt to read it from the file attrs.
         If still not found then a warning will be thrown, which can be
-        suppressed by passing `info`=False.
+        suppressed by passing ``info=False``.
 
         To define a new type of geometry you need to use the
-        `register_geometry` decorator. You are encouraged to do this for your
-        own BOUT++ physics module, to apply relevant normalisations.
+        `register_geometry` decorator. You are encouraged to do
+        this for your own BOUT++ physics module, to apply relevant
+        normalisations.
     gridfilepath : str, optional
         The path to a grid file, containing any variables needed to apply the geometry
         specified by the 'geometry' option, which are not contained in the dump files.
@@ -163,8 +164,8 @@ def open_boutdataset(
     is_restart : bool, optional
         Restart files require some special handling (e.g. working around variables that
         are not present in restart files). By default, this special handling is enabled
-        if the files do not have a time dimension and `restart` is present in the file
-        name in `datapath`. This option can be set to True or False to explicitly enable
+        if the files do not have a time dimension and ``restart`` is present in the file
+        name in ``datapath``. This option can be set to True or False to explicitly enable
         or disable the restart file handling.
kwargs : optional Keyword arguments are passed down to `xarray.open_mfdataset`, which in diff --git a/xbout/plotting/animate.py b/xbout/plotting/animate.py index 96a7289a..68536a97 100644 --- a/xbout/plotting/animate.py +++ b/xbout/plotting/animate.py @@ -138,6 +138,7 @@ def animate_poloidal( Colors to use for the plot axis_coords : None, str, dict Coordinates to use for axis labelling. Only affects time coordinate. + - None: Use the dimension coordinate for each axis, if it exists. - "index": Use the integer index values. - dict: keys are dimension names, values set axis_coords for each axis @@ -342,6 +343,7 @@ def animate_pcolormesh( If set to false, do not create the animation, just return the block axis_coords : None, str, dict Coordinates to use for axis labelling. + - None: Use the dimension coordinate for each axis, if it exists. - "index": Use the integer index values. - dict: keys are dimension names, values set axis_coords for each axis @@ -543,6 +545,7 @@ def animate_line( If set to false, do not create the animation, just return the block axis_coords : None, str, dict Coordinates to use for axis labelling. + - None: Use the dimension coordinate for each axis, if it exists. - "index": Use the integer index values. - dict: keys are dimension names, values set axis_coords for each axis From f7273d4eef70e78535db14deb282150ce6434cb6 Mon Sep 17 00:00:00 2001 From: Peter Hill Date: Wed, 26 Jul 2023 10:47:28 +0100 Subject: [PATCH 23/42] Docs: black formatting --- docs/conf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/conf.py b/docs/conf.py index bf2e6988..ad7c2a1c 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -176,4 +176,4 @@ "numpy": ("https://numpy.org/doc/stable", None), "scipy": ("https://docs.scipy.org/doc/scipy", None), "xarray": ("https://docs.xarray.dev/en/latest", None), - } +} From c2c835295ebb8330cc9afa5225dc789c0cb0cb10 Mon Sep 17 00:00:00 2001 From: Peter Hill Date: Thu, 27 Jul 2023 11:28:04 +0100 Subject: [PATCH 24/42] Docs: Use sphinx-book-theme --- docs/conf.py | 15 ++++++++++----- pyproject.toml | 1 + 2 files changed, 11 insertions(+), 5 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index ad7c2a1c..c8231636 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -86,16 +86,21 @@ # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. # -if on_rtd: - html_theme = "default" -else: - html_theme = "alabaster" +html_theme = "sphinx_book_theme" # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. # -# html_theme_options = {} +html_theme_options = { + "repository_url": "https://github.com/boutproject/xBOUT", + "repository_branch": "master", + "path_to_docs": "docs", + "use_edit_page_button": True, + "use_repository_button": True, + "use_issues_button": True, + "home_page_in_toc": False, +} # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. 
They are copied after the builtin static files, diff --git a/pyproject.toml b/pyproject.toml index 128c96f4..313a7042 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -56,6 +56,7 @@ calc = [ ] docs = [ "sphinx >= 1.4", + "sphinx-book-theme >= 0.4.0rc1", ] 3d_plot = [ "k3d >= 2.8.0", From 464d1e954d2766adf1ab1b9b62b1a04683628db6 Mon Sep 17 00:00:00 2001 From: Peter Hill Date: Thu, 27 Jul 2023 11:30:20 +0100 Subject: [PATCH 25/42] Docs: Get version from importlib.metadata --- docs/conf.py | 16 ++++------------ 1 file changed, 4 insertions(+), 12 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index c8231636..bc7e40a0 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -12,27 +12,19 @@ # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. # -import os -# import sys -# sys.path.insert(0, os.path.abspath('.')) - -# Are we running on readthedocs? -on_rtd = os.environ.get("READTHEDOCS") == "True" +from importlib.metadata import version as get_version # -- Project information ----------------------------------------------------- -import xbout - project = "xBOUT" copyright = "2018, Tom Nicholas" author = "Tom Nicholas" -# The short X.Y version -version = xbout.__version__ # The full version, including alpha/beta/rc tags -release = xbout.__version__ - +release = get_version("xbout") +# The short X.Y version +version = ".".join(release.split(".")[:2]) # -- General configuration --------------------------------------------------- From f7f64b88e48c254d20949732f83904fb4a122dd3 Mon Sep 17 00:00:00 2001 From: Peter Hill Date: Thu, 27 Jul 2023 11:30:30 +0100 Subject: [PATCH 26/42] Docs: Update copyright notice --- docs/conf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/conf.py b/docs/conf.py index bc7e40a0..1088bc96 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -18,7 +18,7 @@ # -- Project information ----------------------------------------------------- project = "xBOUT" -copyright = "2018, Tom Nicholas" +copyright = "2018-2023, Tom Nicholas, BOUT++ team" author = "Tom Nicholas" # The full version, including alpha/beta/rc tags From 8f6c2ab43a667273f92b23c9aa10f37be4e1ce48 Mon Sep 17 00:00:00 2001 From: Peter Hill Date: Thu, 27 Jul 2023 11:39:32 +0100 Subject: [PATCH 27/42] Docs: Use `sphinx.ext.autosummary` to generate API docs --- .gitignore | 1 + docs/_templates/custom-class-template.rst | 32 +++++++++++ docs/_templates/custom-module-template.rst | 66 ++++++++++++++++++++++ docs/api.rst | 10 ++++ docs/conf.py | 15 +++++ docs/index.rst | 7 ++- docs/xbout.rst | 50 ---------------- pyproject.toml | 3 +- 8 files changed, 132 insertions(+), 52 deletions(-) create mode 100644 docs/_templates/custom-class-template.rst create mode 100644 docs/_templates/custom-module-template.rst create mode 100644 docs/api.rst delete mode 100644 docs/xbout.rst diff --git a/.gitignore b/.gitignore index 907b66d0..c79ebe7d 100644 --- a/.gitignore +++ b/.gitignore @@ -68,6 +68,7 @@ instance/ # Sphinx documentation docs/_build/ +docs/generated # PyBuilder target/ diff --git a/docs/_templates/custom-class-template.rst b/docs/_templates/custom-class-template.rst new file mode 100644 index 00000000..4a2889ec --- /dev/null +++ b/docs/_templates/custom-class-template.rst @@ -0,0 +1,32 @@ +{{ fullname | escape | underline}} + +.. currentmodule:: {{ module }} + +.. autoclass:: {{ objname }} + :members: + :undoc-members: + :show-inheritance: + + {% block methods %} + .. 
automethod:: __init__ + + {% if methods %} + .. rubric:: {{ _('Methods') }} + + .. autosummary:: + {% for item in methods %} + ~{{ name }}.{{ item }} + {%- endfor %} + {% endif %} + {% endblock %} + + {% block attributes %} + {% if attributes %} + .. rubric:: {{ _('Attributes') }} + + .. autosummary:: + {% for item in attributes %} + ~{{ name }}.{{ item }} + {%- endfor %} + {% endif %} + {% endblock %} diff --git a/docs/_templates/custom-module-template.rst b/docs/_templates/custom-module-template.rst new file mode 100644 index 00000000..f46f9f63 --- /dev/null +++ b/docs/_templates/custom-module-template.rst @@ -0,0 +1,66 @@ +{{ fullname | escape | underline}} + +.. automodule:: {{ fullname }} + + {% block attributes %} + {% if attributes %} + .. rubric:: {{ _('Module Attributes') }} + + .. autosummary:: + :toctree: + {% for item in attributes %} + {{ item }} + {%- endfor %} + {% endif %} + {% endblock %} + + {% block functions %} + {% if functions %} + .. rubric:: {{ _('Functions') }} + + .. autosummary:: + :toctree: + {% for item in functions %} + {{ item }} + {%- endfor %} + {% endif %} + {% endblock %} + + {% block classes %} + {% if classes %} + .. rubric:: {{ _('Classes') }} + + .. autosummary:: + :toctree: + :template: custom-class-template.rst + {% for item in classes %} + {{ item }} + {%- endfor %} + {% endif %} + {% endblock %} + + {% block exceptions %} + {% if exceptions %} + .. rubric:: {{ _('Exceptions') }} + + .. autosummary:: + :toctree: + {% for item in exceptions %} + {{ item }} + {%- endfor %} + {% endif %} + {% endblock %} + +{% block modules %} +{% if modules %} +.. rubric:: Modules + +.. autosummary:: + :toctree: + :template: custom-module-template.rst + :recursive: +{% for item in modules %} + {{ item }} +{%- endfor %} +{% endif %} +{% endblock %} diff --git a/docs/api.rst b/docs/api.rst new file mode 100644 index 00000000..ed1cca92 --- /dev/null +++ b/docs/api.rst @@ -0,0 +1,10 @@ +=============== + API Reference +=============== + +.. autosummary:: + :toctree: generated + :template: custom-module-template.rst + :recursive: + + xbout diff --git a/docs/conf.py b/docs/conf.py index 1088bc96..c9f8cd63 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -37,13 +37,28 @@ # ones. extensions = [ "sphinx.ext.autodoc", + "sphinx.ext.autosummary", "sphinx.ext.coverage", "sphinx.ext.mathjax", "sphinx.ext.viewcode", "sphinx.ext.napoleon", "sphinx.ext.intersphinx", + "sphinx_autodoc_typehints", ] +autosummary_generate = True +autodoc_default_options = { + 'ignore-module-all': True +} +autodoc_typehints = "description" +autodoc_class_signature = "mixed" + +# Numpy-doc config +napoleon_google_docstring = False +napoleon_numpy_docstring = True +napoleon_attr_annotations = True +napoleon_preprocess_types = True + # Add any paths that contain templates here, relative to this directory. templates_path = ["_templates"] diff --git a/docs/index.rst b/docs/index.rst index 3eebf689..cd4dbe68 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -22,7 +22,12 @@ suggestions. loading_data accessor_methods extending_xbout - xbout + +.. toctree:: + :maxdepth: 2 + :caption: Reference + + API Reference Installation ------------ diff --git a/docs/xbout.rst b/docs/xbout.rst deleted file mode 100644 index 989aa6f9..00000000 --- a/docs/xbout.rst +++ /dev/null @@ -1,50 +0,0 @@ -API reference -============= - -xbout.boutdataarray module --------------------------- - -.. 
automodule:: xbout.boutdataarray - :members: - :undoc-members: - :show-inheritance: - -xbout.boutdataset module ------------------------- - -.. automodule:: xbout.boutdataset - :members: - :undoc-members: - :show-inheritance: - -xbout.geometries module ------------------------ - -.. automodule:: xbout.geometries - :members: - :undoc-members: - :show-inheritance: - -xbout.load module ------------------ - -.. automodule:: xbout.load - :members: - :undoc-members: - :show-inheritance: - -xbout.plotting.animate module ------------------------------ - -.. automodule:: xbout.plotting.animate - :members: - :undoc-members: - :show-inheritance: - -xbout.plotting.utils module ---------------------------- - -.. automodule:: xbout.plotting.utils - :members: - :undoc-members: - :show-inheritance: diff --git a/pyproject.toml b/pyproject.toml index 313a7042..f70fe22a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -55,8 +55,9 @@ calc = [ "xhistogram", ] docs = [ - "sphinx >= 1.4", + "sphinx >= 5.3", "sphinx-book-theme >= 0.4.0rc1", + "sphinx_autodoc_typehints >= 1.19", ] 3d_plot = [ "k3d >= 2.8.0", From e9ab305243893f7f1b1446b71af258fc4280f1e9 Mon Sep 17 00:00:00 2001 From: Peter Hill Date: Thu, 27 Jul 2023 12:04:21 +0100 Subject: [PATCH 28/42] Docs: Fix a bunch more rst errors --- docs/conf.py | 1 + xbout/plotting/plotfuncs.py | 9 ++++--- xbout/tests/test_fastoutput.py | 2 +- xbout/tests/test_load.py | 48 ++++++++++++++++++++-------------- 4 files changed, 35 insertions(+), 25 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index c9f8cd63..e3cc2c25 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -188,4 +188,5 @@ "numpy": ("https://numpy.org/doc/stable", None), "scipy": ("https://docs.scipy.org/doc/scipy", None), "xarray": ("https://docs.xarray.dev/en/latest", None), + "mayavi": ("https://docs.enthought.com/mayavi/mayavi/", None), } diff --git a/xbout/plotting/plotfuncs.py b/xbout/plotting/plotfuncs.py index 9cf371d2..498064c9 100644 --- a/xbout/plotting/plotfuncs.py +++ b/xbout/plotting/plotfuncs.py @@ -102,7 +102,7 @@ def plot2d_wrapper( """ Make a 2D plot using an xarray method, taking into account branch cuts (X-points). - Wraps `xarray.plot` methods, so automatically adds labels. + Wraps `xarray.DataArray.plot` methods, so automatically adds labels. Parameters ---------- @@ -433,15 +433,16 @@ def plot3d( mayavi_figure : mayavi.core.scene.Scene, default None Existing Mayavi figure to add this plot to. mayavi_figure_args : dict, default None - Arguments to use when creating a new Mayavi figure. Ignored if `mayavi_figure` + Arguments to use when creating a new Mayavi figure. Ignored if ``mayavi_figure`` is passed. mayavi_view : (float, float, float), default None - If set, arguments are passed to mlab.view() to set the view when engine="mayavi" + If set, arguments are passed to `mayavi.mlab.view` to set the + view when ``engine="mayavi"`` vmin, vmax : float vmin and vmax are treated specially. If a float is passed, then it is used for vmin/vmax. If the arguments are not passed, then the minimum and maximum of the data are used. For an animation, to get minimum and/or maximum calculated - separately for each frame, pass `vmin=None` and/or `vmax=None` explicitly. + separately for each frame, pass ``vmin=None`` and/or ``vmax=None`` explicitly. 
    **kwargs
        Extra keyword arguments are passed to the backend plotting function
    """
diff --git a/xbout/tests/test_fastoutput.py b/xbout/tests/test_fastoutput.py
index 398c0760..b2e121d3 100644
--- a/xbout/tests/test_fastoutput.py
+++ b/xbout/tests/test_fastoutput.py
@@ -92,7 +92,7 @@ def make_fastoutput_set(path, n):
 def make_fastoutput(path, i, locations):
     """
     Create a single file in the format produced by
-    [`FastOutput`](https://github.com/johnomotani/BoutFastOutput)
+    `FastOutput <https://github.com/johnomotani/BoutFastOutput>`_

     Parameters
     ----------
diff --git a/xbout/tests/test_load.py b/xbout/tests/test_load.py
index 6621695c..64fdfdb5 100644
--- a/xbout/tests/test_load.py
+++ b/xbout/tests/test_load.py
@@ -1291,11 +1291,13 @@ def test_infer_boundaries_2d_parallelization(
     """
     Numbering scheme for nxpe=3, nype=4

-    y  9 10 11
-    ^  6  7  8
-    |  3  4  5
-    |  0  1  2
-    -----> x
+    .. code:: text
+
+        y  9 10 11
+        ^  6  7  8
+        |  3  4  5
+        |  0  1  2
+        -----> x

     """
     ds = create_test_data(0)
@@ -1320,11 +1322,13 @@ def test_infer_boundaries_2d_parallelization_doublenull(
     """
     Numbering scheme for nxpe=3, nype=4

-    y  9 10 11
-    ^  6  7  8
-    |  3  4  5
-    |  0  1  2
-    -----> x
+    .. code:: text
+
+        y  9 10 11
+        ^  6  7  8
+        |  3  4  5
+        |  0  1  2
+        -----> x

     """
     ds = create_test_data(0)
@@ -1351,11 +1355,13 @@ def test_infer_boundaries_2d_parallelization_by_filenum(
     """
     Numbering scheme for nxpe=3, nype=4

-    y  9 10 11
-    ^  6  7  8
-    |  3  4  5
-    |  0  1  2
-    -----> x
+    .. code:: text
+
+        y  9 10 11
+        ^  6  7  8
+        |  3  4  5
+        |  0  1  2
+        -----> x

     """
     filenum = yproc * nxpe + xproc
@@ -1381,11 +1387,13 @@ def test_infer_boundaries_2d_parallelization_doublenull_by_filenum(
     """
     Numbering scheme for nxpe=3, nype=4

-    y  9 10 11
-    ^  6  7  8
-    |  3  4  5
-    |  0  1  2
-    -----> x
+    .. code:: text
+
+        y  9 10 11
+        ^  6  7  8
+        |  3  4  5
+        |  0  1  2
+        -----> x

     """
     filenum = yproc * nxpe + xproc

From 9bdf27000b1e93ffcaad13d2fb5a8335d2e336c8 Mon Sep 17 00:00:00 2001
From: Peter Hill
Date: Thu, 27 Jul 2023 12:07:29 +0100
Subject: [PATCH 29/42] Docs: Add some links to accessor method docs

---
 docs/accessor_methods.rst | 7 +++++--
 1 file changed, 5 insertions(+), 2 deletions(-)

diff --git a/docs/accessor_methods.rst b/docs/accessor_methods.rst
index 5d664b42..6c9fea07 100644
--- a/docs/accessor_methods.rst
+++ b/docs/accessor_methods.rst
@@ -1,8 +1,8 @@
 BoutDataset Accessor Methods
 ============================

-xBOUT defines a set of accessor_ methods on the loaded Datasets and
-DataArrays, which are called by ``ds.bout.method``.
+xBOUT defines a set of accessor_ methods on the loaded `xarray.Dataset` and
+`xarray.DataArray`, which are called by ``ds.bout.<method>``.

 This is where BOUT-specific data manipulation, analysis and plotting
 functionality is stored, for example::
@@ -17,4 +17,7 @@ or::

     ds.bout.create_restarts(savepath='.', nxpe=4, nype=4)

+See `BoutDatasetAccessor` and `BoutDataArrayAccessor` for details on
+the available methods.
+
 ..
_accessor: https://docs.xarray.dev/en/stable/internals/extending-xarray.html From 2daadb7710d2bf36678b2f147e762c15373c109b Mon Sep 17 00:00:00 2001 From: ZedThree Date: Thu, 27 Jul 2023 11:08:28 +0000 Subject: [PATCH 30/42] Apply black formatting --- docs/conf.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index e3cc2c25..25548c6e 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -47,9 +47,7 @@ ] autosummary_generate = True -autodoc_default_options = { - 'ignore-module-all': True -} +autodoc_default_options = {"ignore-module-all": True} autodoc_typehints = "description" autodoc_class_signature = "mixed" From 39304b7631d94c5f616b198769249d72f58417aa Mon Sep 17 00:00:00 2001 From: Peter Hill Date: Thu, 27 Jul 2023 14:00:19 +0100 Subject: [PATCH 31/42] CI: Fix typo in publish workflow [skip ci] --- .github/workflows/pythonpublish.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/pythonpublish.yml b/.github/workflows/pythonpublish.yml index 3f2ebad4..3572a23c 100644 --- a/.github/workflows/pythonpublish.yml +++ b/.github/workflows/pythonpublish.yml @@ -25,6 +25,6 @@ jobs: sudo apt-get update && sudo apt-get install libhdf5-dev libnetcdf-dev python -m pip install --upgrade pip build - name: Build package - runs: python -m build --sdist --wheel + run: python -m build --sdist --wheel - name: Publish package distributions to PyPI uses: pypa/gh-action-pypi-publish@release/v1 From b3ec1cc64266979b2ece509929e753cb891638ef Mon Sep 17 00:00:00 2001 From: Peter Hill Date: Wed, 26 Jul 2023 15:11:50 +0100 Subject: [PATCH 32/42] CI: Add ruff config --- pyproject.toml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/pyproject.toml b/pyproject.toml index f70fe22a..b921371d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -79,3 +79,6 @@ write_to = "xbout/_version.py" [tool.setuptools] packages = ["xbout"] + +[tool.ruff] +ignore = ["E501"] From 669eb07bf52a584c19c1406672dde261e4273ed0 Mon Sep 17 00:00:00 2001 From: Peter Hill Date: Wed, 26 Jul 2023 15:12:30 +0100 Subject: [PATCH 33/42] Don't try backport of importlib.metadata In py3.8, which is now our min version --- xbout/__init__.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/xbout/__init__.py b/xbout/__init__.py index 917706b8..8146f1ee 100644 --- a/xbout/__init__.py +++ b/xbout/__init__.py @@ -11,10 +11,8 @@ from .fastoutput import open_fastoutput -try: - from importlib.metadata import version, PackageNotFoundError -except ModuleNotFoundError: - from importlib_metadata import version, PackageNotFoundError +from importlib.metadata import version, PackageNotFoundError + try: __version__ = version(__name__) except PackageNotFoundError: From 9ff009b1a1f708bf02f9b69557900b15bc3ba1ee Mon Sep 17 00:00:00 2001 From: Peter Hill Date: Wed, 26 Jul 2023 15:14:55 +0100 Subject: [PATCH 34/42] Add some imports to `__all__` --- xbout/__init__.py | 14 ++++++++++++++ xbout/calc/__init__.py | 2 ++ 2 files changed, 16 insertions(+) diff --git a/xbout/__init__.py b/xbout/__init__.py index 8146f1ee..9511b93d 100644 --- a/xbout/__init__.py +++ b/xbout/__init__.py @@ -19,3 +19,17 @@ from setuptools_scm import get_version __version__ = get_version(root="..", relative_to=__file__) + +__all__ = [ + "open_boutdataset", + "collect", + "geometries", + "register_geometry", + "REGISTERED_GEOMETRIES", + "BoutDataArrayAccessor", + "BoutDatasetAccessor", + "animate_pcolormesh", + "animate_poloidal", + "plot_separatrix", + "open_fastoutput", +] diff --git 
a/xbout/calc/__init__.py b/xbout/calc/__init__.py index 8333be7a..912a423e 100644 --- a/xbout/calc/__init__.py +++ b/xbout/calc/__init__.py @@ -1 +1,3 @@ from .turbulence import rms + +__all__ = ["rms"] From b02aa7c3364f21b439d36f48fc06d843e2be460e Mon Sep 17 00:00:00 2001 From: Peter Hill Date: Wed, 26 Jul 2023 15:29:40 +0100 Subject: [PATCH 35/42] Apply ruff fixes Mostly unused variables or imports --- xbout/boutdataarray.py | 2 -- xbout/boutdataset.py | 4 ---- xbout/geometries.py | 5 ++--- xbout/plotting/plotfuncs.py | 12 +++++------- xbout/plotting/utils.py | 1 - xbout/region.py | 7 +------ xbout/tests/test_animate.py | 4 +--- xbout/tests/test_boutdataarray.py | 8 +++----- xbout/tests/test_boutdataset.py | 10 ++-------- xbout/tests/test_fastoutput.py | 2 -- xbout/tests/test_grid.py | 2 +- xbout/tests/test_load.py | 13 +------------ xbout/tests/test_plot.py | 2 -- xbout/tests/test_region.py | 9 --------- xbout/utils.py | 5 ----- 15 files changed, 16 insertions(+), 70 deletions(-) diff --git a/xbout/boutdataarray.py b/xbout/boutdataarray.py index 0b293bb8..1416ec84 100644 --- a/xbout/boutdataarray.py +++ b/xbout/boutdataarray.py @@ -306,7 +306,6 @@ def interpolate_parallel( # Select a particular 'region' and interpolate to higher parallel resolution da = self.data region = da.bout._regions[region] - tcoord = da.metadata["bout_tdim"] xcoord = da.metadata["bout_xdim"] ycoord = da.metadata["bout_ydim"] zcoord = da.metadata["bout_zdim"] @@ -950,7 +949,6 @@ def interpolate_from_unstructured( method = "linear" # extend input coordinates to cover all dims, so we can flatten them - input_coords = [] for coord in kwargs: data = da[coord] missing_dims = tuple(set(dims) - set(data.dims)) diff --git a/xbout/boutdataset.py b/xbout/boutdataset.py index c7fe5e69..8f8dc1db 100644 --- a/xbout/boutdataset.py +++ b/xbout/boutdataset.py @@ -619,7 +619,6 @@ def interpolate_to_cartesian( from scipy.interpolate import ( RegularGridInterpolator, - griddata, ) # Create Cylindrical coordinates for intermediate grid @@ -681,9 +680,6 @@ def interp_single_time(da): # just use da.min().item() here (to get a scalar value instead of a # zero-size array) because .item() doesn't work for dask arrays (yet!). - datamin = float_type(da.min().values) - datamax = float_type(da.max().values) - if tdim in da.dims: data_cartesian = np.zeros((nt, nX, nY, nZ), dtype=float_type) for tind in range(nt): diff --git a/xbout/geometries.py b/xbout/geometries.py index 1cc6d727..7a7f8ad7 100644 --- a/xbout/geometries.py +++ b/xbout/geometries.py @@ -4,7 +4,7 @@ import xarray as xr import numpy as np -from .region import Region, _create_regions_toroidal, _create_single_region +from .region import _create_regions_toroidal, _create_single_region from .utils import ( _add_attrs_to_var, _set_attrs_on_all_vars, @@ -156,7 +156,6 @@ def apply_geometry(ds, geometry_name, *, coordinates=None, grid=None): _add_attrs_to_var(updated_ds, xcoord) if ycoord not in updated_ds.coords: - ny = updated_ds.dims[ycoord] # dy should always be constant in x, so it is safe to convert to a 1d # coordinate. [The y-coordinate has to be a 1d coordinate that labels x-z # slices of the grid (similarly x-coordinate is 1d coordinate that labels y-z @@ -206,7 +205,7 @@ def apply_geometry(ds, geometry_name, *, coordinates=None, grid=None): if bout_v5: if not np.all(updated_ds["dz"].min() == updated_ds["dz"].max()): raise ValueError( - f"Spacing is not constant. Cannot create z coordinate" + "Spacing is not constant. 
Cannot create z coordinate" ) dz = updated_ds["dz"].min() diff --git a/xbout/plotting/plotfuncs.py b/xbout/plotting/plotfuncs.py index 498064c9..89535555 100644 --- a/xbout/plotting/plotfuncs.py +++ b/xbout/plotting/plotfuncs.py @@ -13,7 +13,6 @@ _decompose_regions, _is_core_only, _k3d_plot_isel, - _make_structured_triangulation, plot_separatrices, plot_targets, ) @@ -311,11 +310,11 @@ def plot2d_wrapper( for x, y in zip(x_regions, y_regions): if ( - not da.metadata["bout_xdim"] in x.dims - and not da.metadata["bout_ydim"] in x.dims + da.metadata["bout_xdim"] not in x.dims + and da.metadata["bout_ydim"] not in x.dims ) or ( - not da.metadata["bout_xdim"] in y.dims - and not da.metadata["bout_ydim"] in y.dims + da.metadata["bout_xdim"] not in y.dims + and da.metadata["bout_ydim"] not in y.dims ): # Small regions around X-point do not have segments in x- or y-directions, # so skip @@ -545,7 +544,6 @@ def plot3d( from scipy.interpolate import ( RegularGridInterpolator, griddata, - LinearNDInterpolator, ) print("start interpolating") @@ -801,7 +799,7 @@ def create_or_update_plot(plot_objects=None, tind=None, this_save_as=None): # First create png files in the temporary directory temp_path = Path(d) temp_save_as = str(temp_path.joinpath("temp.png")) - print(f"tind=0") + print("tind=0") plot_objects = create_or_update_plot( tind=0, this_save_as=temp_save_as ) diff --git a/xbout/plotting/utils.py b/xbout/plotting/utils.py index 2e443e0a..db70b74a 100644 --- a/xbout/plotting/utils.py +++ b/xbout/plotting/utils.py @@ -129,7 +129,6 @@ def plot_targets(da, ax, *, x="R", y="Z", hatching=True): da0 = list(da_regions.values())[0] - xcoord = da0.metadata["bout_xdim"] ycoord = da0.metadata["bout_ydim"] if da0.metadata["keep_yboundaries"]: diff --git a/xbout/region.py b/xbout/region.py index b6653824..71673287 100644 --- a/xbout/region.py +++ b/xbout/region.py @@ -1,4 +1,4 @@ -from copy import copy, deepcopy +from copy import deepcopy import numpy as np import xarray as xr from .utils import _set_attrs_on_all_vars @@ -1229,11 +1229,6 @@ def _create_single_region(ds, periodic_y=True): nx = ds.metadata["nx"] ny = ds.metadata["ny"] - mxg = ds.metadata["MXG"] - myg = ds.metadata["MYG"] - # keep_yboundaries is 1 if there are y-boundaries and 0 if there are not - ybndry = ds.metadata["keep_yboundaries"] * myg - connection = "all" if periodic_y else None regions = { diff --git a/xbout/tests/test_animate.py b/xbout/tests/test_animate.py index 13a37bdc..8701ae20 100644 --- a/xbout/tests/test_animate.py +++ b/xbout/tests/test_animate.py @@ -1,11 +1,9 @@ import pytest -import matplotlib from matplotlib import pyplot as plt import numpy as np import xarray as xr from xbout import open_boutdataset -from xbout.boutdataarray import BoutDataArrayAccessor from .test_load import create_bout_ds_list from animatplot.blocks import Pcolormesh, Line @@ -270,7 +268,7 @@ def test_animate_list_not_enough_nrowsncols(self, create_test_file): save_dir, ds = create_test_file with pytest.raises(ValueError): - animation = ds.isel(z=3).bout.animate_list( + ds.isel(z=3).bout.animate_list( ["n", ds["T"].isel(x=2), ds["n"].isel(y=1, z=2)], nrows=2, ncols=1 ) diff --git a/xbout/tests/test_boutdataarray.py b/xbout/tests/test_boutdataarray.py index 280aa309..62c909b2 100644 --- a/xbout/tests/test_boutdataarray.py +++ b/xbout/tests/test_boutdataarray.py @@ -3,13 +3,11 @@ import dask.array import numpy as np import numpy.testing as npt -from pathlib import Path import xarray as xr import xarray.testing as xrt from xarray.core.utils import 
dict_equiv -from xbout.tests.test_load import bout_xyt_example_files, create_bout_ds from xbout import open_boutdataset from xbout.geometries import apply_geometry from xbout.utils import _1d_coord_from_spacing @@ -1147,6 +1145,6 @@ def test_derivatives_doublenull(self, bout_xyt_example_files): keep_yboundaries=True, ) - test_ddx = ds["n"].bout.ddx() - test_ddy = ds["n"].bout.ddy() - test_ddz = ds["n"].bout.ddz() + ds["n"].bout.ddx() + ds["n"].bout.ddy() + ds["n"].bout.ddz() diff --git a/xbout/tests/test_boutdataset.py b/xbout/tests/test_boutdataset.py index 6ca5259a..694abc3d 100644 --- a/xbout/tests/test_boutdataset.py +++ b/xbout/tests/test_boutdataset.py @@ -1,7 +1,7 @@ import pytest import numpy.testing as npt -from xarray import Dataset, DataArray, concat, open_dataset, open_mfdataset +from xarray import Dataset, DataArray, concat, open_dataset import xarray.testing as xrt import dask.array @@ -9,14 +9,13 @@ from pathlib import Path from scipy.integrate import quad_vec -from xbout.tests.test_load import bout_xyt_example_files, create_bout_ds from xbout.tests.test_region import ( params_guards, params_guards_values, params_boundaries, params_boundaries_values, ) -from xbout import BoutDatasetAccessor, open_boutdataset +from xbout import open_boutdataset from xbout.geometries import apply_geometry from xbout.utils import _set_attrs_on_all_vars, _1d_coord_from_spacing from xbout.tests.utils_for_tests import set_geometry_from_input_file @@ -683,7 +682,6 @@ def test_interpolate_parallel( ny_inner = ds.metadata["ny_inner"] + 2 * ybndry jys12 = ds.metadata["jyseps1_2"] + 3 * ybndry jys22 = ds.metadata["jyseps2_2"] + 3 * ybndry - ny = ds.metadata["ny"] + 4 * ybndry for var in ["n", "T"]: v = ds[var] @@ -1172,7 +1170,6 @@ def test_interpolate_parallel_limiter( # Get high parallel resolution version of ds, and check that ds = ds.bout.interpolate_parallel(["n", "T"]) mxg = 2 - myg = 2 ixs1 = ds.metadata["ixseps1"] for var in ["n", "T"]: v = ds[var] @@ -2427,9 +2424,6 @@ def test_to_restart_change_npe_doublenull_expect_fail( with pytest.warns(UserWarning): ds = open_boutdataset(datapath=path) - nx = ds.metadata["nx"] - ny = ds.metadata["ny"] - # Save it to a netCDF file savepath = tmp_path_factory.mktemp( "test_to_restart_change_npe_doublenull_expect_fail" diff --git a/xbout/tests/test_fastoutput.py b/xbout/tests/test_fastoutput.py index b2e121d3..3e3bc208 100644 --- a/xbout/tests/test_fastoutput.py +++ b/xbout/tests/test_fastoutput.py @@ -1,10 +1,8 @@ import numpy as np import random -import pytest from xarray import DataArray, Dataset -import xarray.testing as xrt from xbout.fastoutput import open_fastoutput diff --git a/xbout/tests/test_grid.py b/xbout/tests/test_grid.py index 72da6bc8..007733b4 100644 --- a/xbout/tests/test_grid.py +++ b/xbout/tests/test_grid.py @@ -1,4 +1,4 @@ -from xarray import Dataset, DataArray, open_dataset, merge +from xarray import DataArray, open_dataset, merge from xarray.testing import assert_equal import pytest import numpy as np diff --git a/xbout/tests/test_load.py b/xbout/tests/test_load.py index 64fdfdb5..0fad3025 100644 --- a/xbout/tests/test_load.py +++ b/xbout/tests/test_load.py @@ -1,6 +1,4 @@ -from collections import namedtuple from copy import deepcopy -import inspect from pathlib import Path import re from functools import reduce @@ -124,7 +122,7 @@ def test_no_files(self, tmp_path): with pytest.raises(IOError): path = files_dir.joinpath("run*/example.*.nc") - actual_filepaths = _expand_filepaths(path) + _expand_filepaths(path) @pytest.fixture() @@ 
-307,9 +305,6 @@ def _bout_xyt_example_files( if guards is None: guards = {} - mxg = guards.get("x", 0) - myg = guards.get("y", 0) - if squashed: # create a single data-file, but alter the 'nxpe' and 'nype' variables, as if the # file had been created by combining a set of BOUT.dmp.*.nc files @@ -424,10 +419,6 @@ def create_bout_ds_list( filename = prefix + "." + str(num) + ".nc" file_list.append(filename) - # Include guard cells - upper_bndry_cells = {dim: guards.get(dim) for dim in guards.keys()} - lower_bndry_cells = {dim: guards.get(dim) for dim in guards.keys()} - ds = create_bout_ds( syn_data_type=syn_data_type, num=num, @@ -575,7 +566,6 @@ def create_bout_ds( MYSUB = lengths[2] - extra_boundary_points = 0 if topology == "core": ds["ixseps1"] = nx @@ -770,7 +760,6 @@ def create_bout_grid_ds(xsize=2, ysize=4, guards={}, topology="core", ny_inner=0 "jyseps2_1": jyseps2_1, "jyseps1_2": jyseps1_2, "ny_inner": ny_inner, - "y_boundary_guards": myg, } ) diff --git a/xbout/tests/test_plot.py b/xbout/tests/test_plot.py index 18685750..4042f43a 100644 --- a/xbout/tests/test_plot.py +++ b/xbout/tests/test_plot.py @@ -1,11 +1,9 @@ import pytest -from pathlib import Path from matplotlib import pyplot as plt import numpy as np from xbout import open_boutdataset -from xbout.tests.test_load import bout_xyt_example_files from xbout.tests.test_region import ( params_guards, params_guards_values, diff --git a/xbout/tests/test_region.py b/xbout/tests/test_region.py index 0e49e11b..8ca6dc58 100644 --- a/xbout/tests/test_region.py +++ b/xbout/tests/test_region.py @@ -1,11 +1,9 @@ import pytest -from pathlib import Path import numpy.testing as npt import xarray.testing as xrt -from xbout.tests.test_load import bout_xyt_example_files from xbout import open_boutdataset @@ -141,8 +139,6 @@ def test_region_limiter( keep_yboundaries=keep_yboundaries, ) - mxg = guards["x"] - if keep_xboundaries: ixs = ds.metadata["ixseps1"] else: @@ -239,7 +235,6 @@ def test_region_xpoint( jys1 = ds.metadata["jyseps1_1"] + ybndry ny_inner = ds.metadata["ny_inner"] + 2 * ybndry jys2 = ds.metadata["jyseps2_2"] + 3 * ybndry - ny = ds.metadata["ny"] + 4 * ybndry n = ds["n"] n_noregions = n.copy(deep=True) @@ -433,7 +428,6 @@ def test_region_singlenull( ybndry = 0 jys1 = ds.metadata["jyseps1_1"] + ybndry jys2 = ds.metadata["jyseps2_2"] + ybndry - ny = ds.metadata["ny"] + 2 * ybndry n = ds["n"] n_noregions = n.copy(deep=True) @@ -606,7 +600,6 @@ def test_region_connecteddoublenull( ny_inner = ds.metadata["ny_inner"] + 2 * ybndry jys12 = ds.metadata["jyseps1_2"] + 3 * ybndry jys22 = ds.metadata["jyseps2_2"] + 3 * ybndry - ny = ds.metadata["ny"] + 4 * ybndry n = ds["n"] n_noregions = n.copy(deep=True) @@ -928,7 +921,6 @@ def test_region_disconnecteddoublenull( ny_inner = ds.metadata["ny_inner"] + 2 * ybndry jys12 = ds.metadata["jyseps1_2"] + 3 * ybndry jys22 = ds.metadata["jyseps2_2"] + 3 * ybndry - ny = ds.metadata["ny"] + 4 * ybndry n = ds["n"] n_noregions = n.copy(deep=True) @@ -1502,7 +1494,6 @@ def test_region_disconnecteddoublenull_get_one_guard( ny_inner = ds.metadata["ny_inner"] + 2 * ybndry jys12 = ds.metadata["jyseps1_2"] + 3 * ybndry jys22 = ds.metadata["jyseps2_2"] + 3 * ybndry - ny = ds.metadata["ny"] + 4 * ybndry if isinstance(with_guards, dict): xguards = with_guards.get("x", mxg) diff --git a/xbout/utils.py b/xbout/utils.py index 9587fcb4..5909bf83 100644 --- a/xbout/utils.py +++ b/xbout/utils.py @@ -252,7 +252,6 @@ def _check_new_nxpe(ds, nxpe): # Check nxpe is valid nx = ds.metadata["nx"] - 2 * ds.metadata["MXG"] - 
mxsub = nx // nxpe if nx % nxpe != 0: raise ValueError( @@ -686,7 +685,6 @@ def _follow_boundary(ds, start_region, start_direction, xbndry, ybndry, Rcoord, visited_regions.append(this_region) ds_region = ds.bout.from_region(this_region, with_guards=0) - region = ds.bout._regions[this_region] # Get all boundary points from this region, and decide which region to go to next this_region = None @@ -749,9 +747,6 @@ def _get_bounding_surfaces(ds, coords): else: ybndry = 0 - xcoord = ds.metadata["bout_xdim"] - ycoord = ds.metadata["bout_ydim"] - # First find the outer boundary start_region = None for name, region in ds.bout._regions.items(): From 896ceb4bc122bc20071f4ad4f26562bac87cd040 Mon Sep 17 00:00:00 2001 From: Peter Hill Date: Wed, 26 Jul 2023 15:31:25 +0100 Subject: [PATCH 36/42] Delete some duplicated methods --- xbout/boutdataarray.py | 3 --- xbout/boutdataset.py | 3 --- xbout/region.py | 3 --- 3 files changed, 9 deletions(-) diff --git a/xbout/boutdataarray.py b/xbout/boutdataarray.py index 1416ec84..b018cd91 100644 --- a/xbout/boutdataarray.py +++ b/xbout/boutdataarray.py @@ -384,9 +384,6 @@ def interpolate_parallel( return da - def add_cartesian_coordinates(self): - return _add_cartesian_coordinates(self.data) - def add_cartesian_coordinates(self): """ Add Cartesian (X,Y,Z) coordinates. diff --git a/xbout/boutdataset.py b/xbout/boutdataset.py index 8f8dc1db..cc308fcb 100644 --- a/xbout/boutdataset.py +++ b/xbout/boutdataset.py @@ -266,9 +266,6 @@ def find_with_dims(first_var, dims): return ds - def add_cartesian_coordinates(self): - return _add_cartesian_coordinates(self.data) - def integrate_midpoints(self, variable, *, dims=None, cumulative_t=False): """ Integrate using the midpoint rule for spatial dimensions, and trapezium rule for diff --git a/xbout/region.py b/xbout/region.py index 71673287..d30d4047 100644 --- a/xbout/region.py +++ b/xbout/region.py @@ -145,9 +145,6 @@ def __repr__(self): result += f"\t{attr}\t{val}\n" return result - def __eq__(self, other): - return vars(self) == vars(other) - def get_slices(self, mxg=0, myg=0): """ Return x- and y-dimension slices that select this region from the global From d62e8a84c3aae9983804f9958e2b29b7014cb6fc Mon Sep 17 00:00:00 2001 From: Peter Hill Date: Thu, 27 Jul 2023 14:16:10 +0100 Subject: [PATCH 37/42] Fix a couple of undefined names --- xbout/plotting/plotfuncs.py | 2 +- xbout/tests/test_boutdataset.py | 2 ++ 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/xbout/plotting/plotfuncs.py b/xbout/plotting/plotfuncs.py index 89535555..e115d434 100644 --- a/xbout/plotting/plotfuncs.py +++ b/xbout/plotting/plotfuncs.py @@ -47,7 +47,7 @@ def plot_regions(da, ax=None, **kwargs): da_regions = _decompose_regions(da) colored_regions = [ - xr.full_like(da_region, fill_value=num / len(regions)) + xr.full_like(da_region, fill_value=num / len(da_regions)) for num, da_region in enumerate(da_regions.values()) ] diff --git a/xbout/tests/test_boutdataset.py b/xbout/tests/test_boutdataset.py index 694abc3d..50448c68 100644 --- a/xbout/tests/test_boutdataset.py +++ b/xbout/tests/test_boutdataset.py @@ -1927,6 +1927,8 @@ def test_load_options(self): @pytest.mark.skip def test_load_options_in_dataset(self, bout_xyt_example_files): + from boutdata.data import BoutOptions + dataset_list = bout_xyt_example_files(None, nxpe=1, nype=1, nt=1) ds = open_boutdataset( datapath=dataset_list, inputfilepath=EXAMPLE_OPTIONS_FILE_PATH From 5e68612011d032e03617cbca104b5aed4a7ac335 Mon Sep 17 00:00:00 2001 From: Peter Hill Date: Thu, 27 Jul 2023 
15:04:14 +0100
Subject: [PATCH 38/42] CI: Add ruff linter

---
 .github/workflows/master.yml        | 19 -------------------
 .github/workflows/pythonpackage.yml | 18 ------------------
 .github/workflows/ruff.yaml         | 15 +++++++++++++++
 3 files changed, 15 insertions(+), 37 deletions(-)
 create mode 100644 .github/workflows/ruff.yaml

diff --git a/.github/workflows/master.yml b/.github/workflows/master.yml
index ad1fde43..8eef5a3b 100644
--- a/.github/workflows/master.yml
+++ b/.github/workflows/master.yml
@@ -68,22 +68,3 @@ jobs:
     - name: Test with pytest
       run: |
         pytest -vv --long --cov
-
-
-  # Need to tidy up the things that flake8 finds before we activate this
-  #flake8:
-
-  #  runs-on: ubuntu-latest
-  #  if: always()
-
-  #  steps:
-  #  - uses: actions/checkout@v2
-  #  - name: Set up Python
-  #    uses: actions/setup-python@v1
-  #  - name: Install dependencies
-  #    run: |
-  #      python -m pip install --upgrade pip
-  #  - name: Lint with flake8
-  #    run: |
-  #      pip install flake8
-  #      flake8
diff --git a/.github/workflows/pythonpackage.yml b/.github/workflows/pythonpackage.yml
index 2dfd84f4..e323fa86 100644
--- a/.github/workflows/pythonpackage.yml
+++ b/.github/workflows/pythonpackage.yml
@@ -68,21 +68,3 @@ jobs:
     - name: Test with pytest
       run: |
         pytest -vv --long --cov
-
-  # Need to tidy up the things that flake8 finds before we activate this
-  #flake8:
-
-  #  runs-on: ubuntu-latest
-  #  if: always()
-
-  #  steps:
-  #  - uses: actions/checkout@v2
-  #  - name: Set up Python
-  #    uses: actions/setup-python@v1
-  #  - name: Install dependencies
-  #    run: |
-  #      python -m pip install --upgrade pip
-  #  - name: Lint with flake8
-  #    run: |
-  #      pip install flake8
-  #      flake8
diff --git a/.github/workflows/ruff.yaml b/.github/workflows/ruff.yaml
new file mode 100644
index 00000000..5c26f5dd
--- /dev/null
+++ b/.github/workflows/ruff.yaml
@@ -0,0 +1,15 @@
+name: Linting
+on: [pull_request]
+
+jobs:
+  lint:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v3
+      - uses: actions/setup-python@v4
+        with:
+          python-version: '3.x'
+      - name: Install ruff
+        run: pip install ruff
+      - name: Run ruff
+        run: ruff xbout

From 0fe6e3ba56f41a3d4a45912312c68c32be829fa8 Mon Sep 17 00:00:00 2001
From: Peter Hill
Date: Thu, 27 Jul 2023 15:13:00 +0100
Subject: [PATCH 39/42] Remove duplicated function

---
 xbout/utils.py | 21 ---------------------
 1 file changed, 21 deletions(-)

diff --git a/xbout/utils.py b/xbout/utils.py
index 5909bf83..2711d37d 100644
--- a/xbout/utils.py
+++ b/xbout/utils.py
@@ -128,27 +128,6 @@ def update_ny(name):
     return da

-def _add_cartesian_coordinates(ds):
-    # Add Cartesian X and Y coordinates if they do not exist already
-    # Works on either BoutDataset or BoutDataArray
-
-    R = ds["R"]
-    Z = ds["Z"]
-    zeta = ds[ds.metadata["bout_zdim"]]
-    if "X_cartesian" not in ds.coords:
-        X = R * np.cos(zeta)
-        ds = ds.assign_coords(X_cartesian=X)
-    if "Y_cartesian" not in ds.coords:
-        Y = R * np.sin(zeta)
-        ds = ds.assign_coords(Y_cartesian=Y)
-    if "Z_cartesian" not in ds.coords:
-        zcoord = ds.metadata["bout_zdim"]
-        nz = len(ds[zcoord])
-        ds = ds.assign_coords(Z_cartesian=Z.expand_dims({zcoord: nz}, axis=-1))
-
-    return ds
-
-
 def _1d_coord_from_spacing(spacing, dim, ds=None, *, origin_at=None):
     """
     Create a 1d coordinate varying along the dimension 'dim' from the grid spacing

From 38c5880800e702c5db3b0a3570082519dcbdfa19 Mon Sep 17 00:00:00 2001
From: Peter Hill
Date: Thu, 27 Jul 2023 15:35:14 +0100
Subject: [PATCH 40/42] Move test fixtures into `conftest.py`

---
 .../boutmodules/tests/test_stormdataset.py |  1 -
xbout/tests/conftest.py | 189 ++++++ xbout/tests/test_against_collect.py | 2 +- xbout/tests/test_animate.py | 2 +- xbout/tests/test_load.py | 599 +----------------- xbout/tests/utils_for_tests.py | 416 ++++++++++++ 6 files changed, 609 insertions(+), 600 deletions(-) create mode 100644 xbout/tests/conftest.py diff --git a/examples/boutmodules/tests/test_stormdataset.py b/examples/boutmodules/tests/test_stormdataset.py index 622de7fb..8f51f15b 100644 --- a/examples/boutmodules/tests/test_stormdataset.py +++ b/examples/boutmodules/tests/test_stormdataset.py @@ -2,7 +2,6 @@ from xarray import concat from xbout import open_boutdataset -from xbout.tests.test_load import bout_xyt_example_files @pytest.mark.skip diff --git a/xbout/tests/conftest.py b/xbout/tests/conftest.py new file mode 100644 index 00000000..99b3b016 --- /dev/null +++ b/xbout/tests/conftest.py @@ -0,0 +1,189 @@ +import operator +import re +from copy import deepcopy +from functools import reduce +from pathlib import Path + +import numpy as np +import pytest +from xarray import DataArray + +from xbout.tests.utils_for_tests import ( + _get_kwargs, + create_bout_ds_list, + create_bout_grid_ds, +) + + +@pytest.fixture(scope="session") +def bout_xyt_example_files(tmp_path_factory): + return _bout_xyt_example_files + + +_bout_xyt_example_files_cache = {} + + +def _bout_xyt_example_files( + tmp_path_factory, + prefix="BOUT.dmp", + lengths=(6, 2, 4, 7), + nxpe=4, + nype=2, + nt=1, + guards=None, + syn_data_type="random", + grid=None, + squashed=False, + topology="core", + write_to_disk=False, + bout_v5=False, + metric_3D=False, +): + """ + Mocks up a set of BOUT-like Datasets + + Either returns list of Datasets (if write_to_disk=False) + or writes Datasets to netCDF files and returns the temporary test directory + containing them, deleting the temporary directory once that test is done (if + write_to_disk=True). 
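+
+    A minimal usage sketch (the parameter values here are illustrative
+    assumptions, not taken from any particular test)::
+
+        # Build the mock datasets in memory; tmp_path_factory is only needed
+        # when write_to_disk=True, so None is passed here
+        ds_list = _bout_xyt_example_files(
+            None, nxpe=2, nype=2, topology="core", write_to_disk=False
+        )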
+    """
+    call_args = _get_kwargs(ignore="tmp_path_factory")
+
+    try:
+        # Has been called with the same signature before, just return the cached result
+        return deepcopy(_bout_xyt_example_files_cache[call_args])
+    except KeyError:
+        pass
+
+    if guards is None:
+        guards = {}
+
+    if squashed:
+        # create a single data-file, but alter the 'nxpe' and 'nype' variables, as if the
+        # file had been created by combining a set of BOUT.dmp.*.nc files
+        this_lengths = (
+            lengths[0],
+            lengths[1] * nxpe,
+            lengths[2] * nype,
+            lengths[3],
+        )
+        ds_list, file_list = create_bout_ds_list(
+            prefix=prefix,
+            lengths=this_lengths,
+            nxpe=1,
+            nype=1,
+            nt=nt,
+            guards=guards,
+            topology=topology,
+            syn_data_type=syn_data_type,
+            squashed=True,
+            bout_v5=bout_v5,
+            metric_3D=metric_3D,
+        )
+        ds_list[0]["nxpe"] = nxpe
+        ds_list[0]["nype"] = nype
+    else:
+        ds_list, file_list = create_bout_ds_list(
+            prefix=prefix,
+            lengths=lengths,
+            nxpe=nxpe,
+            nype=nype,
+            nt=nt,
+            guards=guards,
+            topology=topology,
+            syn_data_type=syn_data_type,
+            bout_v5=bout_v5,
+            metric_3D=metric_3D,
+        )
+
+    if grid is not None:
+        xsize = lengths[1] * nxpe
+        ysize = lengths[2] * nype
+        grid_ds = create_bout_grid_ds(
+            xsize=xsize,
+            ysize=ysize,
+            guards=guards,
+            topology=topology,
+            ny_inner=3 * lengths[2],
+        )
+
+    if not write_to_disk:
+        if grid is None:
+            _bout_xyt_example_files_cache[call_args] = ds_list
+            return deepcopy(ds_list)
+        else:
+            _bout_xyt_example_files_cache[call_args] = ds_list, grid_ds
+            return deepcopy((ds_list, grid_ds))
+        raise ValueError("tmp_path_factory required when write_to_disk=True")
+
+    save_dir = tmp_path_factory.mktemp("data")
+
+    for ds, file_name in zip(ds_list, file_list):
+        ds.to_netcdf(save_dir.joinpath(file_name))
+
+    if grid is not None:
+        grid_ds.to_netcdf(save_dir.joinpath(grid + ".nc"))
+
+    # Return a glob-like path to all files created, which has all file numbers replaced
+    # with a single asterisk
+    path = str(save_dir.joinpath(file_list[-1]))
+
+    count = 1
+    if nt > 1:
+        count += 1
+    # We have to reverse the path before limiting the number of numbers replaced so that the
+    # tests don't get confused by pytest's persistent temporary directories (which are also designated
+    # by different numbers)
+    glob_pattern = Path((re.sub(r"\d+", "*", path[::-1], count=count))[::-1])
+    _bout_xyt_example_files_cache[call_args] = glob_pattern
+    return glob_pattern
+
+
+fci_shape = (2, 2, 3, 4)
+fci_guards = (2, 2, 0)
+
+
+@pytest.fixture
+def create_example_grid_file_fci(tmp_path_factory):
+    """
+    Mocks up an FCI-like netCDF grid file, and returns the path to it.
+
+    Deletes the temporary directory once that test is done.
+ """ + + # Create grid dataset + shape = (fci_shape[1] + 2 * fci_guards[0], *fci_shape[2:]) + arr = np.arange(reduce(operator.mul, shape, 1)).reshape(shape) + grid = DataArray(data=arr, name="R", dims=["x", "y", "z"]).to_dataset() + grid["Z"] = DataArray(np.random.random(shape), dims=["x", "y", "z"]) + grid["dy"] = DataArray(np.ones(shape), dims=["x", "y", "z"]) + grid = grid.set_coords(["dy"]) + + # Create temporary directory + save_dir = tmp_path_factory.mktemp("griddata") + + # Save + filepath = save_dir.joinpath("fci.nc") + grid.to_netcdf(filepath, engine="netcdf4") + + return filepath + + +@pytest.fixture +def create_example_files_fci(tmp_path_factory): + return _bout_xyt_example_files( + tmp_path_factory, + lengths=fci_shape, + nxpe=1, + nype=1, + # nt=1, + guards={a: b for a, b in zip("xyz", fci_guards)}, + syn_data_type="random", + grid=None, + squashed=False, + # topology="core", + write_to_disk=False, + bout_v5=True, + metric_3D=True, + ) diff --git a/xbout/tests/test_against_collect.py b/xbout/tests/test_against_collect.py index 81d8b33c..5f22cf97 100644 --- a/xbout/tests/test_against_collect.py +++ b/xbout/tests/test_against_collect.py @@ -3,7 +3,7 @@ from xbout import open_boutdataset, collect as new_collect -from .test_load import create_bout_ds, create_bout_ds_list, METADATA_VARS +from .utils_for_tests import create_bout_ds, create_bout_ds_list, METADATA_VARS boutdata = pytest.importorskip("boutdata", reason="boutdata is not available") old_collect = boutdata.collect diff --git a/xbout/tests/test_animate.py b/xbout/tests/test_animate.py index 8701ae20..fd565c55 100644 --- a/xbout/tests/test_animate.py +++ b/xbout/tests/test_animate.py @@ -4,7 +4,7 @@ import xarray as xr from xbout import open_boutdataset -from .test_load import create_bout_ds_list +from .utils_for_tests import create_bout_ds_list from animatplot.blocks import Pcolormesh, Line diff --git a/xbout/tests/test_load.py b/xbout/tests/test_load.py index 0fad3025..d8766236 100644 --- a/xbout/tests/test_load.py +++ b/xbout/tests/test_load.py @@ -1,14 +1,8 @@ -from copy import deepcopy from pathlib import Path -import re -from functools import reduce -import operator import pytest -import numpy as np - -from xarray import DataArray, Dataset, concat +from xarray import concat from xarray.tests.test_dataset import create_test_data import xarray.testing as xrt @@ -26,7 +20,7 @@ _BOUT_TIME_DEPENDENT_META_VARS, ) from xbout.utils import _separate_metadata -from xbout.tests.utils_for_tests import _get_kwargs +from xbout.tests.utils_for_tests import create_bout_ds, METADATA_VARS def test_check_extensions(tmp_path): @@ -262,545 +256,6 @@ def test_arrange_along_xyt(self, create_filepaths): assert actual_concat_dims == ["t", "y", "x"] -@pytest.fixture(scope="session") -def bout_xyt_example_files(tmp_path_factory): - return _bout_xyt_example_files - - -_bout_xyt_example_files_cache = {} - - -def _bout_xyt_example_files( - tmp_path_factory, - prefix="BOUT.dmp", - lengths=(6, 2, 4, 7), - nxpe=4, - nype=2, - nt=1, - guards=None, - syn_data_type="random", - grid=None, - squashed=False, - topology="core", - write_to_disk=False, - bout_v5=False, - metric_3D=False, -): - """ - Mocks up a set of BOUT-like Datasets - - Either returns list of Datasets (if write_to_disk=False) - or writes Datasets to netCDF files and returns the temporary test directory - containing them, deleting the temporary directory once that test is done (if - write_to_disk=True). 
- """ - call_args = _get_kwargs(ignore="tmp_path_factory") - - try: - # Has been called with the same signature before, just return the cached result - return deepcopy(_bout_xyt_example_files_cache[call_args]) - except KeyError: - pass - - if guards is None: - guards = {} - - if squashed: - # create a single data-file, but alter the 'nxpe' and 'nype' variables, as if the - # file had been created by combining a set of BOUT.dmp.*.nc files - this_lengths = ( - lengths[0], - lengths[1] * nxpe, - lengths[2] * nype, - lengths[3], - ) - ds_list, file_list = create_bout_ds_list( - prefix=prefix, - lengths=this_lengths, - nxpe=1, - nype=1, - nt=nt, - guards=guards, - topology=topology, - syn_data_type=syn_data_type, - squashed=True, - bout_v5=bout_v5, - metric_3D=metric_3D, - ) - ds_list[0]["nxpe"] = nxpe - ds_list[0]["nype"] = nype - else: - ds_list, file_list = create_bout_ds_list( - prefix=prefix, - lengths=lengths, - nxpe=nxpe, - nype=nype, - nt=nt, - guards=guards, - topology=topology, - syn_data_type=syn_data_type, - bout_v5=bout_v5, - metric_3D=metric_3D, - ) - - if grid is not None: - xsize = lengths[1] * nxpe - ysize = lengths[2] * nype - grid_ds = create_bout_grid_ds( - xsize=xsize, - ysize=ysize, - guards=guards, - topology=topology, - ny_inner=3 * lengths[2], - ) - - if not write_to_disk: - if grid is None: - _bout_xyt_example_files_cache[call_args] = ds_list - return deepcopy(ds_list) - else: - _bout_xyt_example_files_cache[call_args] = ds_list, grid_ds - return deepcopy((ds_list, grid_ds)) - raise ValueError("tmp_path_factory required when write_to_disk=True") - - save_dir = tmp_path_factory.mktemp("data") - - for ds, file_name in zip(ds_list, file_list): - ds.to_netcdf(save_dir.joinpath(file_name)) - - if grid is not None: - grid_ds.to_netcdf(save_dir.joinpath(grid + ".nc")) - - # Return a glob-like path to all files created, which has all file numbers replaced - # with a single asterix - path = str(save_dir.joinpath(file_list[-1])) - - count = 1 - if nt > 1: - count += 1 - # We have to reverse the path before limiting the number of numbers replaced so that the - # tests don't get confused by pytest's persistent temporary directories (which are also designated - # by different numbers) - glob_pattern = Path((re.sub(r"\d+", "*", path[::-1], count=count))[::-1]) - _bout_xyt_example_files_cache[call_args] = glob_pattern - return glob_pattern - - -def create_bout_ds_list( - prefix, - lengths=(6, 2, 4, 7), - nxpe=4, - nype=2, - nt=1, - guards={}, - topology="core", - syn_data_type="random", - squashed=False, - bout_v5=False, - metric_3D=False, -): - """ - Mocks up a set of BOUT-like datasets. - - Structured as though they were produced by a x-y parallelised run with multiple restarts. - """ - - if nt != 1: - raise ValueError( - "nt > 1 means the time dimension is split over several " - + "directories. This is not implemented yet." - ) - - file_list = [] - ds_list = [] - for j in range(nype): - for i in range(nxpe): - num = i + nxpe * j - filename = prefix + "." 
+ str(num) + ".nc" - file_list.append(filename) - - ds = create_bout_ds( - syn_data_type=syn_data_type, - num=num, - lengths=lengths, - nxpe=nxpe, - nype=nype, - xproc=i, - yproc=j, - guards=guards, - topology=topology, - squashed=squashed, - bout_v5=bout_v5, - metric_3D=metric_3D, - ) - ds_list.append(ds) - - return ds_list, file_list - - -_create_bout_ds_cache = {} - - -def create_bout_ds( - syn_data_type="random", - lengths=(6, 2, 4, 7), - num=0, - nxpe=1, - nype=1, - xproc=0, - yproc=0, - guards=None, - topology="core", - squashed=False, - bout_v5=False, - metric_3D=False, -): - call_args = _get_kwargs() - - try: - # Has been called with the same signature before, just return the cached result - return deepcopy(_create_bout_ds_cache[call_args]) - except KeyError: - pass - - if metric_3D and not bout_v5: - raise ValueError("3D metric requires BOUT++ v5") - - if guards is None: - guards = {} - - # Set the shape of the data in this dataset - t_length, x_length, y_length, z_length = lengths - mxg = guards.get("x", 0) - myg = guards.get("y", 0) - x_length += 2 * mxg - y_length += 2 * myg - - # calculate global nx, ny and nz - nx = nxpe * lengths[1] + 2 * mxg - ny = nype * lengths[2] - nz = 1 * lengths[3] - - if squashed and "double-null" in topology: - ny = ny + 2 * myg - y_length = y_length + 2 * myg - shape = (t_length, x_length, y_length, z_length) - - # Fill with some kind of synthetic data - if syn_data_type == "random": - # Each dataset contains unique random noise - np.random.seed(seed=num) - data = np.random.randn(*shape) - elif syn_data_type == "linear": - # Variables increase linearly across entire domain - data = DataArray(-np.ones(shape), dims=("t", "x", "y", "z")) - - t_array = DataArray( - (nx - 2 * mxg) * ny * nz * np.arange(t_length, dtype=float), dims="t" - ) - x_array = DataArray( - ny * nz * (xproc * lengths[1] + np.arange(lengths[1], dtype=float)), - dims="x", - ) - y_array = DataArray( - nz * (yproc * lengths[2] + np.arange(lengths[2], dtype=float)), dims="y" - ) - z_array = DataArray(np.arange(z_length, dtype=float), dims="z") - - data[:, mxg : x_length - mxg, myg : lengths[2] + myg, :] = ( - t_array + x_array + y_array + z_array - ) - elif syn_data_type == "stepped": - # Each dataset contains a different number depending on the filename - data = np.ones(shape) * num - elif isinstance(syn_data_type, int): - data = np.ones(shape) * syn_data_type - else: - raise ValueError("Not a recognised choice of type of synthetic bout data.") - - T = DataArray(data, dims=["t", "x", "y", "z"]) - n = DataArray(data, dims=["t", "x", "y", "z"]) - S = DataArray(data[:, :, :, 0], dims=["t", "x", "y"]) - for v in [n, T]: - v.attrs["direction_y"] = "Standard" - v.attrs["cell_location"] = "CELL_CENTRE" - v.attrs["direction_z"] = "Standard" - for v in [S]: - v.attrs["direction_y"] = "Standard" - v.attrs["cell_location"] = "CELL_CENTRE" - v.attrs["direction_z"] = "Average" - ds = Dataset({"n": n, "T": T, "S": S}) - - # BOUT_VERSION needed to deal with backwards incompatible changes: - # - # - v3 and earlier: number of points in z is MZ-1 - # - v4 and later: number of points in z is MZ - # - v5 and later: metric components can be either 2D or 3D - # - v5 and later: dz changed to be a Field2D/3D - ds["BOUT_VERSION"] = 5.0 if bout_v5 else 4.3 - ds["use_metric_3d"] = int(metric_3D) - - # Include grid data - ds["NXPE"] = nxpe - ds["NYPE"] = nype - ds["NZPE"] = 1 - ds["PE_XIND"] = xproc - ds["PE_YIND"] = yproc - ds["MYPE"] = num - - ds["MXG"] = mxg - ds["MYG"] = myg - ds["MZG"] = 0 - ds["nx"] = nx - 
ds["ny"] = ny - ds["nz"] = nz - ds["MZ"] = 1 * lengths[3] - if squashed: - ds["MXSUB"] = lengths[1] // nxpe - ds["MYSUB"] = lengths[2] // nype - ds["MZSUB"] = lengths[3] - else: - ds["MXSUB"] = lengths[1] - ds["MYSUB"] = lengths[2] - ds["MZSUB"] = lengths[3] - - MYSUB = lengths[2] - - - if topology == "core": - ds["ixseps1"] = nx - ds["ixseps2"] = nx - ds["jyseps1_1"] = -1 - ds["jyseps2_1"] = ny // 2 - 1 - ds["jyseps1_2"] = ny // 2 - 1 - ds["jyseps2_2"] = ny - 1 - ds["ny_inner"] = ny // 2 - elif topology == "sol": - ds["ixseps1"] = 0 - ds["ixseps2"] = 0 - ds["jyseps1_1"] = -1 - ds["jyseps2_1"] = ny // 2 - 1 - ds["jyseps1_2"] = ny // 2 - 1 - ds["jyseps2_2"] = ny - 1 - ds["ny_inner"] = ny // 2 - elif topology == "limiter": - ds["ixseps1"] = nx // 2 - ds["ixseps2"] = nx - ds["jyseps1_1"] = -1 - ds["jyseps2_1"] = ny // 2 - 1 - ds["jyseps1_2"] = ny // 2 - 1 - ds["jyseps2_2"] = ny - 1 - ds["ny_inner"] = ny // 2 - elif topology == "xpoint": - if nype < 4 and not squashed: - raise ValueError(f"Not enough processors for xpoint topology: nype={nype}") - ds["ixseps1"] = nx // 2 - ds["ixseps2"] = nx // 2 - ds["jyseps1_1"] = MYSUB - 1 - ny_inner = 2 * MYSUB - ds["ny_inner"] = ny_inner - ds["jyseps2_1"] = MYSUB - 1 - ds["jyseps1_2"] = ny - MYSUB - 1 - ds["jyseps2_2"] = ny - MYSUB - 1 - elif topology == "single-null": - if nype < 3 and not squashed: - raise ValueError(f"Not enough processors for xpoint topology: nype={nype}") - ds["ixseps1"] = nx // 2 - ds["ixseps2"] = nx - ds["jyseps1_1"] = MYSUB - 1 - ds["jyseps2_1"] = ny // 2 - 1 - ds["jyseps1_2"] = ny // 2 - 1 - ds["jyseps2_2"] = ny - MYSUB - 1 - ds["ny_inner"] = ny // 2 - elif topology == "connected-double-null": - if nype < 6 and not squashed: - raise ValueError( - "Not enough processors for connected-double-null topology: " - f"nype={nype}" - ) - ds["ixseps1"] = nx // 2 - ds["ixseps2"] = nx // 2 - ds["jyseps1_1"] = MYSUB - 1 - ny_inner = 3 * MYSUB - ds["ny_inner"] = ny_inner - ds["jyseps2_1"] = ny_inner - MYSUB - 1 - ds["jyseps1_2"] = ny_inner + MYSUB - 1 - ds["jyseps2_2"] = ny - MYSUB - 1 - elif topology == "lower-disconnected-double-null": - if nype < 6 and not squashed: - raise ValueError( - "Not enough processors for lower-disconnected-double-null " - f"topology: nype={nype}" - ) - ds["ixseps1"] = nx // 2 - ds["ixseps2"] = nx // 2 + 4 - if ds["ixseps2"] >= nx: - raise ValueError( - "Not enough points in the x-direction. ixseps2=" - f'{ds["ixseps2"]} > nx={nx}' - ) - ds["jyseps1_1"] = MYSUB - 1 - ny_inner = 3 * MYSUB - ds["ny_inner"] = ny_inner - ds["jyseps2_1"] = ny_inner - MYSUB - 1 - ds["jyseps1_2"] = ny_inner + MYSUB - 1 - ds["jyseps2_2"] = ny - MYSUB - 1 - elif topology == "upper-disconnected-double-null": - if nype < 6 and not squashed: - raise ValueError( - "Not enough processors for upper-disconnected-double-null " - f"topology: nype={nype}" - ) - ds["ixseps2"] = nx // 2 - ds["ixseps1"] = nx // 2 + 4 - if ds["ixseps2"] >= nx: - raise ValueError( - "Not enough points in the x-direction. 
ixseps2=" - f'{ds["ixseps2"]} > nx={nx}' - ) - ds["jyseps1_1"] = MYSUB - 1 - ny_inner = 3 * MYSUB - ds["ny_inner"] = ny_inner - ds["jyseps2_1"] = ny_inner - MYSUB - 1 - ds["jyseps1_2"] = ny_inner + MYSUB - 1 - ds["jyseps2_2"] = ny - MYSUB - 1 - else: - raise ValueError(f"Unrecognised topology={topology}") - - if metric_3D: - one = DataArray(np.ones((x_length, y_length, z_length)), dims=["x", "y", "z"]) - zero = DataArray(np.zeros((x_length, y_length, z_length)), dims=["x", "y", "z"]) - else: - one = DataArray(np.ones((x_length, y_length)), dims=["x", "y"]) - zero = DataArray(np.zeros((x_length, y_length)), dims=["x", "y"]) - - ds["zperiod"] = 1 - ds["ZMIN"] = 0.0 - ds["ZMAX"] = 1.0 - ds["g11"] = one - ds["g22"] = one - ds["g33"] = one - ds["g12"] = zero - ds["g13"] = zero - ds["g23"] = zero - ds["g_11"] = one - ds["g_22"] = one - ds["g_33"] = one - ds["g_12"] = zero - ds["g_13"] = zero - ds["g_23"] = zero - ds["G1"] = zero - ds["G2"] = zero - ds["G3"] = zero - ds["J"] = one - ds["Bxy"] = one - ds["zShift"] = zero - - ds["dx"] = 0.5 * one - ds["dy"] = 2.0 * one - if bout_v5: - ds["dz"] = 2.0 * one * np.pi / nz - else: - ds["dz"] = 2.0 * np.pi / nz - - ds["iteration"] = t_length - 1 - ds["hist_hi"] = t_length - 1 - ds["t_array"] = DataArray(np.arange(t_length, dtype=float) * 10.0, dims="t") - ds["tt"] = ds["t_array"][-1] - - # xarray adds this encoding when opening a file. Emulate here as it may be used to - # get the file number - ds.encoding["source"] = f"BOUT.dmp.{num}.nc" - - _create_bout_ds_cache[call_args] = ds - return deepcopy(ds) - - -_create_bout_grid_ds_cache = {} - - -def create_bout_grid_ds(xsize=2, ysize=4, guards={}, topology="core", ny_inner=0): - call_args = _get_kwargs() - - try: - # Has been called with the same signature before, just return the cached result - return deepcopy(_create_bout_grid_ds_cache[call_args]) - except KeyError: - pass - - # Set the shape of the data in this dataset - mxg = guards.get("x", 0) - myg = guards.get("y", 0) - xsize += 2 * mxg - ysize += 2 * myg - - # jyseps* from grid file only ever used to check topology when loading the grid file, - # so do not need to be consistent with the main dataset - jyseps2_1 = ysize // 2 - jyseps1_2 = jyseps2_1 - - if "double-null" in topology or "xpoint" in topology: - # Has upper target as well - ysize += 2 * myg - - # make different from jyseps2_1 so double-null toplogy is recognised - jyseps1_2 += 1 - - shape = (xsize, ysize) - - data = DataArray(np.ones(shape), dims=["x", "y"]) - - ds = Dataset( - { - "psixy": data, - "Rxy": data, - "Zxy": data, - "hthe": data, - "y_boundary_guards": myg, - "jyseps2_1": jyseps2_1, - "jyseps1_2": jyseps1_2, - "ny_inner": ny_inner, - } - ) - - _create_bout_grid_ds_cache[call_args] = ds - return deepcopy(ds) - - -# Note, MYPE, PE_XIND and PE_YIND not included, since they are different for each -# processor and so are dropped when loading datasets. 
-
-METADATA_VARS = [
-    "BOUT_VERSION",
-    "NXPE",
-    "NYPE",
-    "NZPE",
-    "MXG",
-    "MYG",
-    "MZG",
-    "nx",
-    "ny",
-    "nz",
-    "MZ",
-    "MXSUB",
-    "MYSUB",
-    "MZSUB",
-    "hist_hi",
-    "iteration",
-    "ixseps1",
-    "ixseps2",
-    "jyseps1_1",
-    "jyseps1_2",
-    "jyseps2_1",
-    "jyseps2_2",
-    "ny_inner",
-    "tt",
-    "zperiod",
-    "ZMIN",
-    "ZMAX",
-    "use_metric_3d",
-]
-
-
 class TestStripMetadata:
     def test_strip_metadata(self):
         original = create_bout_ds()
@@ -1495,53 +950,3 @@ def test_trim_timing_info(self, is_restart):
         expected = create_test_data(0)
 
         xrt.assert_equal(ds, expected)
-
-
-fci_shape = (2, 2, 3, 4)
-fci_guards = (2, 2, 0)
-
-
-@pytest.fixture
-def create_example_grid_file_fci(tmp_path_factory):
-    """
-    Mocks up a FCI-like netCDF file, and return the temporary test
-    directory containing them.
-
-    Deletes the temporary directory once that test is done.
-    """
-
-    # Create grid dataset
-    shape = (fci_shape[1] + 2 * fci_guards[0], *fci_shape[2:])
-    arr = np.arange(reduce(operator.mul, shape, 1)).reshape(shape)
-    grid = DataArray(data=arr, name="R", dims=["x", "y", "z"]).to_dataset()
-    grid["Z"] = DataArray(np.random.random(shape), dims=["x", "y", "z"])
-    grid["dy"] = DataArray(np.ones(shape), dims=["x", "y", "z"])
-    grid = grid.set_coords(["dy"])
-
-    # Create temporary directory
-    save_dir = tmp_path_factory.mktemp("griddata")
-
-    # Save
-    filepath = save_dir.joinpath("fci.nc")
-    grid.to_netcdf(filepath, engine="netcdf4")
-
-    return filepath
-
-
-@pytest.fixture
-def create_example_files_fci(tmp_path_factory):
-    return _bout_xyt_example_files(
-        tmp_path_factory,
-        lengths=fci_shape,
-        nxpe=1,
-        nype=1,
-        # nt=1,
-        guards={a: b for a, b in zip("xyz", fci_guards)},
-        syn_data_type="random",
-        grid=None,
-        squashed=False,
-        # topology="core",
-        write_to_disk=False,
-        bout_v5=True,
-        metric_3D=True,
-    )
diff --git a/xbout/tests/utils_for_tests.py b/xbout/tests/utils_for_tests.py
index bcdb1850..01f01159 100644
--- a/xbout/tests/utils_for_tests.py
+++ b/xbout/tests/utils_for_tests.py
@@ -1,8 +1,44 @@
+from copy import deepcopy
 from boutdata.data import BoutOptionsFile
 from gelidum import freeze
 import inspect
 import numpy as np
 from pathlib import Path
+from xarray import DataArray, Dataset
+
+
+# Note, MYPE, PE_XIND and PE_YIND not included, since they are different for each
+# processor and so are dropped when loading datasets.
+METADATA_VARS = [
+    "BOUT_VERSION",
+    "NXPE",
+    "NYPE",
+    "NZPE",
+    "MXG",
+    "MYG",
+    "MZG",
+    "nx",
+    "ny",
+    "nz",
+    "MZ",
+    "MXSUB",
+    "MYSUB",
+    "MZSUB",
+    "hist_hi",
+    "iteration",
+    "ixseps1",
+    "ixseps2",
+    "jyseps1_1",
+    "jyseps1_2",
+    "jyseps2_1",
+    "jyseps2_2",
+    "ny_inner",
+    "tt",
+    "zperiod",
+    "ZMIN",
+    "ZMAX",
+    "use_metric_3d",
+]
 
 
 def load_example_input(name, *, nx=None, ny=None, nz=None):
@@ -139,3 +175,383 @@ def _get_kwargs(ignore=None):
             kwargs[key] = value
 
     return freeze(kwargs)
+
+
+def create_bout_ds_list(
+    prefix,
+    lengths=(6, 2, 4, 7),
+    nxpe=4,
+    nype=2,
+    nt=1,
+    guards={},
+    topology="core",
+    syn_data_type="random",
+    squashed=False,
+    bout_v5=False,
+    metric_3D=False,
+):
+    """
+    Mocks up a set of BOUT-like datasets.
+
+    Structured as though they were produced by an x-y parallelised run with multiple restarts.
+    """
+
+    if nt != 1:
+        raise ValueError(
+            "nt > 1 means the time dimension is split over several "
+            + "directories. This is not implemented yet."
+        )
+
+    file_list = []
+    ds_list = []
+    for j in range(nype):
+        for i in range(nxpe):
+            num = i + nxpe * j
+            filename = prefix + "." 
+ str(num) + ".nc" + file_list.append(filename) + + ds = create_bout_ds( + syn_data_type=syn_data_type, + num=num, + lengths=lengths, + nxpe=nxpe, + nype=nype, + xproc=i, + yproc=j, + guards=guards, + topology=topology, + squashed=squashed, + bout_v5=bout_v5, + metric_3D=metric_3D, + ) + ds_list.append(ds) + + return ds_list, file_list + + +_create_bout_ds_cache = {} + + +def create_bout_ds( + syn_data_type="random", + lengths=(6, 2, 4, 7), + num=0, + nxpe=1, + nype=1, + xproc=0, + yproc=0, + guards=None, + topology="core", + squashed=False, + bout_v5=False, + metric_3D=False, +): + call_args = _get_kwargs() + + try: + # Has been called with the same signature before, just return the cached result + return deepcopy(_create_bout_ds_cache[call_args]) + except KeyError: + pass + + if metric_3D and not bout_v5: + raise ValueError("3D metric requires BOUT++ v5") + + if guards is None: + guards = {} + + # Set the shape of the data in this dataset + t_length, x_length, y_length, z_length = lengths + mxg = guards.get("x", 0) + myg = guards.get("y", 0) + x_length += 2 * mxg + y_length += 2 * myg + + # calculate global nx, ny and nz + nx = nxpe * lengths[1] + 2 * mxg + ny = nype * lengths[2] + nz = 1 * lengths[3] + + if squashed and "double-null" in topology: + ny = ny + 2 * myg + y_length = y_length + 2 * myg + shape = (t_length, x_length, y_length, z_length) + + # Fill with some kind of synthetic data + if syn_data_type == "random": + # Each dataset contains unique random noise + np.random.seed(seed=num) + data = np.random.randn(*shape) + elif syn_data_type == "linear": + # Variables increase linearly across entire domain + data = DataArray(-np.ones(shape), dims=("t", "x", "y", "z")) + + t_array = DataArray( + (nx - 2 * mxg) * ny * nz * np.arange(t_length, dtype=float), dims="t" + ) + x_array = DataArray( + ny * nz * (xproc * lengths[1] + np.arange(lengths[1], dtype=float)), + dims="x", + ) + y_array = DataArray( + nz * (yproc * lengths[2] + np.arange(lengths[2], dtype=float)), dims="y" + ) + z_array = DataArray(np.arange(z_length, dtype=float), dims="z") + + data[:, mxg : x_length - mxg, myg : lengths[2] + myg, :] = ( + t_array + x_array + y_array + z_array + ) + elif syn_data_type == "stepped": + # Each dataset contains a different number depending on the filename + data = np.ones(shape) * num + elif isinstance(syn_data_type, int): + data = np.ones(shape) * syn_data_type + else: + raise ValueError("Not a recognised choice of type of synthetic bout data.") + + T = DataArray(data, dims=["t", "x", "y", "z"]) + n = DataArray(data, dims=["t", "x", "y", "z"]) + S = DataArray(data[:, :, :, 0], dims=["t", "x", "y"]) + for v in [n, T]: + v.attrs["direction_y"] = "Standard" + v.attrs["cell_location"] = "CELL_CENTRE" + v.attrs["direction_z"] = "Standard" + for v in [S]: + v.attrs["direction_y"] = "Standard" + v.attrs["cell_location"] = "CELL_CENTRE" + v.attrs["direction_z"] = "Average" + ds = Dataset({"n": n, "T": T, "S": S}) + + # BOUT_VERSION needed to deal with backwards incompatible changes: + # + # - v3 and earlier: number of points in z is MZ-1 + # - v4 and later: number of points in z is MZ + # - v5 and later: metric components can be either 2D or 3D + # - v5 and later: dz changed to be a Field2D/3D + ds["BOUT_VERSION"] = 5.0 if bout_v5 else 4.3 + ds["use_metric_3d"] = int(metric_3D) + + # Include grid data + ds["NXPE"] = nxpe + ds["NYPE"] = nype + ds["NZPE"] = 1 + ds["PE_XIND"] = xproc + ds["PE_YIND"] = yproc + ds["MYPE"] = num + + ds["MXG"] = mxg + ds["MYG"] = myg + ds["MZG"] = 0 + ds["nx"] = nx + 
ds["ny"] = ny + ds["nz"] = nz + ds["MZ"] = 1 * lengths[3] + if squashed: + ds["MXSUB"] = lengths[1] // nxpe + ds["MYSUB"] = lengths[2] // nype + ds["MZSUB"] = lengths[3] + else: + ds["MXSUB"] = lengths[1] + ds["MYSUB"] = lengths[2] + ds["MZSUB"] = lengths[3] + + MYSUB = lengths[2] + + if topology == "core": + ds["ixseps1"] = nx + ds["ixseps2"] = nx + ds["jyseps1_1"] = -1 + ds["jyseps2_1"] = ny // 2 - 1 + ds["jyseps1_2"] = ny // 2 - 1 + ds["jyseps2_2"] = ny - 1 + ds["ny_inner"] = ny // 2 + elif topology == "sol": + ds["ixseps1"] = 0 + ds["ixseps2"] = 0 + ds["jyseps1_1"] = -1 + ds["jyseps2_1"] = ny // 2 - 1 + ds["jyseps1_2"] = ny // 2 - 1 + ds["jyseps2_2"] = ny - 1 + ds["ny_inner"] = ny // 2 + elif topology == "limiter": + ds["ixseps1"] = nx // 2 + ds["ixseps2"] = nx + ds["jyseps1_1"] = -1 + ds["jyseps2_1"] = ny // 2 - 1 + ds["jyseps1_2"] = ny // 2 - 1 + ds["jyseps2_2"] = ny - 1 + ds["ny_inner"] = ny // 2 + elif topology == "xpoint": + if nype < 4 and not squashed: + raise ValueError(f"Not enough processors for xpoint topology: nype={nype}") + ds["ixseps1"] = nx // 2 + ds["ixseps2"] = nx // 2 + ds["jyseps1_1"] = MYSUB - 1 + ny_inner = 2 * MYSUB + ds["ny_inner"] = ny_inner + ds["jyseps2_1"] = MYSUB - 1 + ds["jyseps1_2"] = ny - MYSUB - 1 + ds["jyseps2_2"] = ny - MYSUB - 1 + elif topology == "single-null": + if nype < 3 and not squashed: + raise ValueError(f"Not enough processors for xpoint topology: nype={nype}") + ds["ixseps1"] = nx // 2 + ds["ixseps2"] = nx + ds["jyseps1_1"] = MYSUB - 1 + ds["jyseps2_1"] = ny // 2 - 1 + ds["jyseps1_2"] = ny // 2 - 1 + ds["jyseps2_2"] = ny - MYSUB - 1 + ds["ny_inner"] = ny // 2 + elif topology == "connected-double-null": + if nype < 6 and not squashed: + raise ValueError( + "Not enough processors for connected-double-null topology: " + f"nype={nype}" + ) + ds["ixseps1"] = nx // 2 + ds["ixseps2"] = nx // 2 + ds["jyseps1_1"] = MYSUB - 1 + ny_inner = 3 * MYSUB + ds["ny_inner"] = ny_inner + ds["jyseps2_1"] = ny_inner - MYSUB - 1 + ds["jyseps1_2"] = ny_inner + MYSUB - 1 + ds["jyseps2_2"] = ny - MYSUB - 1 + elif topology == "lower-disconnected-double-null": + if nype < 6 and not squashed: + raise ValueError( + "Not enough processors for lower-disconnected-double-null " + f"topology: nype={nype}" + ) + ds["ixseps1"] = nx // 2 + ds["ixseps2"] = nx // 2 + 4 + if ds["ixseps2"] >= nx: + raise ValueError( + "Not enough points in the x-direction. ixseps2=" + f'{ds["ixseps2"]} > nx={nx}' + ) + ds["jyseps1_1"] = MYSUB - 1 + ny_inner = 3 * MYSUB + ds["ny_inner"] = ny_inner + ds["jyseps2_1"] = ny_inner - MYSUB - 1 + ds["jyseps1_2"] = ny_inner + MYSUB - 1 + ds["jyseps2_2"] = ny - MYSUB - 1 + elif topology == "upper-disconnected-double-null": + if nype < 6 and not squashed: + raise ValueError( + "Not enough processors for upper-disconnected-double-null " + f"topology: nype={nype}" + ) + ds["ixseps2"] = nx // 2 + ds["ixseps1"] = nx // 2 + 4 + if ds["ixseps2"] >= nx: + raise ValueError( + "Not enough points in the x-direction. 
ixseps2=" + f'{ds["ixseps2"]} > nx={nx}' + ) + ds["jyseps1_1"] = MYSUB - 1 + ny_inner = 3 * MYSUB + ds["ny_inner"] = ny_inner + ds["jyseps2_1"] = ny_inner - MYSUB - 1 + ds["jyseps1_2"] = ny_inner + MYSUB - 1 + ds["jyseps2_2"] = ny - MYSUB - 1 + else: + raise ValueError(f"Unrecognised topology={topology}") + + if metric_3D: + one = DataArray(np.ones((x_length, y_length, z_length)), dims=["x", "y", "z"]) + zero = DataArray(np.zeros((x_length, y_length, z_length)), dims=["x", "y", "z"]) + else: + one = DataArray(np.ones((x_length, y_length)), dims=["x", "y"]) + zero = DataArray(np.zeros((x_length, y_length)), dims=["x", "y"]) + + ds["zperiod"] = 1 + ds["ZMIN"] = 0.0 + ds["ZMAX"] = 1.0 + ds["g11"] = one + ds["g22"] = one + ds["g33"] = one + ds["g12"] = zero + ds["g13"] = zero + ds["g23"] = zero + ds["g_11"] = one + ds["g_22"] = one + ds["g_33"] = one + ds["g_12"] = zero + ds["g_13"] = zero + ds["g_23"] = zero + ds["G1"] = zero + ds["G2"] = zero + ds["G3"] = zero + ds["J"] = one + ds["Bxy"] = one + ds["zShift"] = zero + + ds["dx"] = 0.5 * one + ds["dy"] = 2.0 * one + if bout_v5: + ds["dz"] = 2.0 * one * np.pi / nz + else: + ds["dz"] = 2.0 * np.pi / nz + + ds["iteration"] = t_length - 1 + ds["hist_hi"] = t_length - 1 + ds["t_array"] = DataArray(np.arange(t_length, dtype=float) * 10.0, dims="t") + ds["tt"] = ds["t_array"][-1] + + # xarray adds this encoding when opening a file. Emulate here as it may be used to + # get the file number + ds.encoding["source"] = f"BOUT.dmp.{num}.nc" + + _create_bout_ds_cache[call_args] = ds + return deepcopy(ds) + + +_create_bout_grid_ds_cache = {} + + +def create_bout_grid_ds(xsize=2, ysize=4, guards={}, topology="core", ny_inner=0): + call_args = _get_kwargs() + + try: + # Has been called with the same signature before, just return the cached result + return deepcopy(_create_bout_grid_ds_cache[call_args]) + except KeyError: + pass + + # Set the shape of the data in this dataset + mxg = guards.get("x", 0) + myg = guards.get("y", 0) + xsize += 2 * mxg + ysize += 2 * myg + + # jyseps* from grid file only ever used to check topology when loading the grid file, + # so do not need to be consistent with the main dataset + jyseps2_1 = ysize // 2 + jyseps1_2 = jyseps2_1 + + if "double-null" in topology or "xpoint" in topology: + # Has upper target as well + ysize += 2 * myg + + # make different from jyseps2_1 so double-null toplogy is recognised + jyseps1_2 += 1 + + shape = (xsize, ysize) + + data = DataArray(np.ones(shape), dims=["x", "y"]) + + ds = Dataset( + { + "psixy": data, + "Rxy": data, + "Zxy": data, + "hthe": data, + "y_boundary_guards": myg, + "jyseps2_1": jyseps2_1, + "jyseps1_2": jyseps1_2, + "ny_inner": ny_inner, + } + ) + + _create_bout_grid_ds_cache[call_args] = ds + return deepcopy(ds) From edbf1efd7735ef371c094a12f13de60aea50f3c2 Mon Sep 17 00:00:00 2001 From: David Bold Date: Thu, 5 Oct 2023 10:10:49 +0200 Subject: [PATCH 41/42] setuptool_scm supports git-archive since version 7 Fixes: #297 --- pyproject.toml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index f70fe22a..05fb8183 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,8 +1,7 @@ [build-system] requires = [ "setuptools >= 65", - "setuptools_scm[toml] >= 6.2", - "setuptools_scm_git_archive", + "setuptools_scm[toml] >= 7", "wheel >= 0.29.0", ] build-backend = "setuptools.build_meta" From af8dc99f7365031b05504a4677bdb5cf8c653999 Mon Sep 17 00:00:00 2001 From: John Omotani Date: Fri, 24 Nov 2023 15:37:52 +0000 Subject: [PATCH 42/42] Convert 
int to numpy.intc in to_restart

BOUT++ expects to read a C 'int', but plain Python ints are written to the
NetCDF restart files as int64 by default, causing an error when BOUT++ reads
them back.
---
 xbout/utils.py | 22 ++++++++++++----------
 1 file changed, 12 insertions(+), 10 deletions(-)

diff --git a/xbout/utils.py b/xbout/utils.py
index 2711d37d..32be7edc 100644
--- a/xbout/utils.py
+++ b/xbout/utils.py
@@ -492,21 +492,23 @@ def _split_into_restarts(ds, variables, savepath, nxpe, nype, tind, prefix, over
             if isinstance(value, str):
                 # Write strings as byte-strings so BOUT++ can read them
                 value = value.encode()
+            elif isinstance(value, int):
+                value = np.intc(value)
             restart_ds[v] = value
 
         # These variables need to be altered, because they depend on the number of
         # files and/or the rank of this file.
-        restart_ds["MXSUB"] = mxsub
-        restart_ds["MYSUB"] = mysub
-        restart_ds["NXPE"] = nxpe
-        restart_ds["NYPE"] = nype
-        restart_ds["PE_XIND"] = xproc
-        restart_ds["PE_YIND"] = yproc
-        restart_ds["hist_hi"] = hist_hi
-        restart_ds["PE_XIND"] = xproc
-        restart_ds["PE_YIND"] = yproc
-        restart_ds["MYPE"] = yproc * nxpe + xproc
+        restart_ds["MXSUB"] = np.intc(mxsub)
+        restart_ds["MYSUB"] = np.intc(mysub)
+        restart_ds["NXPE"] = np.intc(nxpe)
+        restart_ds["NYPE"] = np.intc(nype)
+        restart_ds["PE_XIND"] = np.intc(xproc)
+        restart_ds["PE_YIND"] = np.intc(yproc)
+        restart_ds["hist_hi"] = np.intc(hist_hi)
+        restart_ds["PE_XIND"] = np.intc(xproc)
+        restart_ds["PE_YIND"] = np.intc(yproc)
+        restart_ds["MYPE"] = np.intc(yproc * nxpe + xproc)
 
         # tt is the simulation time where the restart happens
        restart_ds["tt"] = tt
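
Note on the intc conversion above: assigning a plain Python int into an xarray
Dataset stores it as a 64-bit integer (on most platforms), whereas numpy.intc
maps to the C 'int' that BOUT++ reads. The sketch below is illustrative only
and not part of the patch; the variable names are made up, and it assumes
nothing beyond numpy and xarray being installed:

    import numpy as np
    import xarray as xr

    ds = xr.Dataset()
    ds["NXPE_plain"] = 4       # plain Python int: stored as int64 (on most platforms)
    ds["NXPE_c"] = np.intc(4)  # numpy.intc: stored as a C int (int32)

    print(ds["NXPE_plain"].dtype)  # int64 -> triggers the read error this patch avoids
    print(ds["NXPE_c"].dtype)      # int32 -> matches the C 'int' BOUT++ expects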