From 3e27d85d50c2eded6859193e1ab08d9336f81244 Mon Sep 17 00:00:00 2001
From: Filipe Fernandes
Date: Thu, 18 May 2023 10:05:53 -0300
Subject: [PATCH 01/49] minor fixes, left over from #1026

---
 compliance_checker/acdd.py       | 4 ++--
 compliance_checker/cf/cf_1_6.py  | 4 ++--
 compliance_checker/cf/cf_1_7.py  | 2 --
 compliance_checker/cf/cf_1_9.py  | 2 --
 compliance_checker/cf/cf_base.py | 2 +-
 5 files changed, 5 insertions(+), 9 deletions(-)

diff --git a/compliance_checker/acdd.py b/compliance_checker/acdd.py
index 603b4156..6502f45d 100644
--- a/compliance_checker/acdd.py
+++ b/compliance_checker/acdd.py
@@ -256,7 +256,7 @@ def check_lat_extents(self, ds):
 
         # identify lat var(s) as per CF 4.1
         lat_vars = {}  # var -> number of criteria passed
-        for _name, var in ds.variables.items():
+        for var in ds.variables.values():
             # must have units
             if not hasattr(var, "units"):
                 continue
@@ -354,7 +354,7 @@ def check_lon_extents(self, ds):
 
         # identify lon var(s) as per CF 4.2
         lon_vars = {}  # var -> number of criteria passed
-        for _name, var in ds.variables.items():
+        for var in ds.variables.values():
            # must have units
             if not hasattr(var, "units"):
                 continue
diff --git a/compliance_checker/cf/cf_1_6.py b/compliance_checker/cf/cf_1_6.py
index 4519f2fb..57eb6532 100644
--- a/compliance_checker/cf/cf_1_6.py
+++ b/compliance_checker/cf/cf_1_6.py
@@ -416,7 +416,7 @@ def check_fill_value_equal_missing_value(self, ds):
         fails = []
         total = 0
 
-        for _name, variable in ds.variables.items():
+        for variable in ds.variables.values():
             # If the variable have a defined _FillValue a defined missing_value check it.
 
             if hasattr(variable, "_FillValue") and hasattr(variable, "missing_value"):
@@ -447,7 +447,7 @@ def check_valid_range_or_valid_min_max_present(self, ds):
         fails = []
         total = 0
 
-        for _name, variable in ds.variables.items():
+        for variable in ds.variables.values():
             if hasattr(variable, "valid_max") and (
                 hasattr(variable, "valid_min") or hasattr(variable, "valid_range")
             ):
diff --git a/compliance_checker/cf/cf_1_7.py b/compliance_checker/cf/cf_1_7.py
index fbfac740..1ff97520 100644
--- a/compliance_checker/cf/cf_1_7.py
+++ b/compliance_checker/cf/cf_1_7.py
@@ -794,8 +794,6 @@ def _evaluate_towgs84(self, val):
         return (True, msg)
 
     def check_grid_mapping(self, ds):
-        # FIXME: Looks like this is not needed.
-        # super().check_grid_mapping.__doc__
         prev_return = super().check_grid_mapping(ds)
         grid_mapping_variables = cfutil.get_grid_mapping_variables(ds)
         for var_name in sorted(grid_mapping_variables):
diff --git a/compliance_checker/cf/cf_1_9.py b/compliance_checker/cf/cf_1_9.py
index 4f05fef9..5e085bd7 100644
--- a/compliance_checker/cf/cf_1_9.py
+++ b/compliance_checker/cf/cf_1_9.py
@@ -76,8 +76,6 @@ def check_time_coordinate_variable_has_calendar(self, ds):
         return ret_val
 
     def check_time_coordinate(self, ds):
-        # FIXME: Looks like this is not needed.
-        # super().check_calendar.__doc__
         prev_return = super().check_time_coordinate(ds)
         seconds_regex = regex.compile(
             r"\w+ since \d{1,4}-\d{1,2}-\d{1,2}[ T]"
diff --git a/compliance_checker/cf/cf_base.py b/compliance_checker/cf/cf_base.py
index a6f6ed5d..2e420c81 100644
--- a/compliance_checker/cf/cf_base.py
+++ b/compliance_checker/cf/cf_base.py
@@ -567,7 +567,7 @@ def _find_ancillary_vars(self, ds, refresh=False):
         # Invalidate the cache at all costs
         self._ancillary_vars[ds] = []
 
-        for _name, var in ds.variables.items():
+        for var in ds.variables.values():
             if hasattr(var, "ancillary_variables"):
                 for anc_name in var.ancillary_variables.split(" "):
                     if anc_name in ds.variables:

From b902a7b769ccab343b245e490ddbef58e8e3c655 Mon Sep 17 00:00:00 2001
From: Filipe Fernandes
Date: Thu, 18 May 2023 10:21:24 -0300
Subject: [PATCH 02/49] fix more broken links

---
 compliance_checker/cf/cf_1_6.py | 2 +-
 compliance_checker/ioos.py      | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/compliance_checker/cf/cf_1_6.py b/compliance_checker/cf/cf_1_6.py
index 57eb6532..04ec8962 100644
--- a/compliance_checker/cf/cf_1_6.py
+++ b/compliance_checker/cf/cf_1_6.py
@@ -109,7 +109,7 @@ def check_child_attr_data_types(self, ds):
             - add_offset
             - _FillValue
         the data type of the attribute must match the type of its parent variable as specified in the
-        NetCDF User Guide (NUG) https://www.unidata.ucar.edu/software/netcdf/docs/attribute_conventions.html,
+        NetCDF User Guide (NUG) https://docs.unidata.ucar.edu/netcdf-c/current/attribute_conventions.html,
         referenced in the CF Conventions in Section 2.5.2
         (http://cfconventions.org/Data/cf-conventions/cf-conventions-1.7/cf-conventions.html#missing-data)
 
diff --git a/compliance_checker/ioos.py b/compliance_checker/ioos.py
index bd06160a..fe7dbbfd 100644
--- a/compliance_checker/ioos.py
+++ b/compliance_checker/ioos.py
@@ -1328,7 +1328,7 @@ def check_platform_vocabulary(self, ds):
         """
        The platform_vocabulary attribute is recommended to be a URL to
         https://mmisw.org/ont/ioos/platform or
-        http://vocab.nerc.ac.uk/collection/L06/current/. However,
+        https://vocab.nerc.ac.uk/collection/L06/current/. However,
         it is required to at least be a URL.
 
         Args:

From 786e9ab3f07e196ef99e734f7db353e072d7ca55 Mon Sep 17 00:00:00 2001
From: Filipe Fernandes
Date: Tue, 23 May 2023 14:04:41 -0300
Subject: [PATCH 03/49] workaround latest urllib3

---
 test_requirements.txt | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/test_requirements.txt b/test_requirements.txt
index 1ebf0279..3b2c22ad 100644
--- a/test_requirements.txt
+++ b/test_requirements.txt
@@ -11,3 +11,5 @@ pytest>=2.9.0
 pytest-cov>=3.0.0
 pytest-vcr
 requests-mock>=1.7.0
+# We need this pin until a new version of vcrpy is out.
+urllib3<2

From 76581fb748d63f4414a29d9aa5b3e39c131b8f12 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Tue, 23 May 2023 17:53:10 +0000
Subject: [PATCH 04/49] Bump mamba-org/provision-with-micromamba from 15 to 16

Bumps [mamba-org/provision-with-micromamba](https://github.com/mamba-org/provision-with-micromamba) from 15 to 16.
- [Release notes](https://github.com/mamba-org/provision-with-micromamba/releases)
- [Commits](https://github.com/mamba-org/provision-with-micromamba/compare/v15...v16)

---
updated-dependencies:
- dependency-name: mamba-org/provision-with-micromamba
  dependency-type: direct:production
  update-type: version-update:semver-major
...
Signed-off-by: dependabot[bot]
---
 .github/workflows/cc-checker-ugrid-test.yml | 2 +-
 .github/workflows/cc-plugin-glider-test.yml | 2 +-
 .github/workflows/cc-plugin-sgrid-test.yml  | 2 +-
 .github/workflows/cc-plugin-ugrid-test.yml  | 2 +-
 .github/workflows/codecov.yml               | 2 +-
 .github/workflows/default-tests.yml         | 2 +-
 .github/workflows/deploy-docs.yml           | 2 +-
 .github/workflows/integration-tests.yml     | 2 +-
 8 files changed, 8 insertions(+), 8 deletions(-)

diff --git a/.github/workflows/cc-checker-ugrid-test.yml b/.github/workflows/cc-checker-ugrid-test.yml
index fd129011..a8365165 100644
--- a/.github/workflows/cc-checker-ugrid-test.yml
+++ b/.github/workflows/cc-checker-ugrid-test.yml
@@ -12,7 +12,7 @@ jobs:
     - uses: actions/checkout@v3
 
     - name: Setup Micromamba
-      uses: mamba-org/provision-with-micromamba@v15
+      uses: mamba-org/provision-with-micromamba@v16
       with:
         environment-file: false
 
diff --git a/.github/workflows/cc-plugin-glider-test.yml b/.github/workflows/cc-plugin-glider-test.yml
index e1de1fa2..f43ed6d9 100644
--- a/.github/workflows/cc-plugin-glider-test.yml
+++ b/.github/workflows/cc-plugin-glider-test.yml
@@ -12,7 +12,7 @@ jobs:
     - uses: actions/checkout@v3
 
     - name: Setup Micromamba
-      uses: mamba-org/provision-with-micromamba@v15
+      uses: mamba-org/provision-with-micromamba@v16
       with:
         environment-file: false
 
diff --git a/.github/workflows/cc-plugin-sgrid-test.yml b/.github/workflows/cc-plugin-sgrid-test.yml
index 272c9258..560d7121 100644
--- a/.github/workflows/cc-plugin-sgrid-test.yml
+++ b/.github/workflows/cc-plugin-sgrid-test.yml
@@ -12,7 +12,7 @@ jobs:
     - uses: actions/checkout@v3
 
     - name: Setup Micromamba
-      uses: mamba-org/provision-with-micromamba@v15
+      uses: mamba-org/provision-with-micromamba@v16
       with:
         environment-file: false
 
diff --git a/.github/workflows/cc-plugin-ugrid-test.yml b/.github/workflows/cc-plugin-ugrid-test.yml
index fd129011..a8365165 100644
--- a/.github/workflows/cc-plugin-ugrid-test.yml
+++ b/.github/workflows/cc-plugin-ugrid-test.yml
@@ -12,7 +12,7 @@ jobs:
     - uses: actions/checkout@v3
 
     - name: Setup Micromamba
-      uses: mamba-org/provision-with-micromamba@v15
+      uses: mamba-org/provision-with-micromamba@v16
       with:
         environment-file: false
 
diff --git a/.github/workflows/codecov.yml b/.github/workflows/codecov.yml
index e5a2340b..63858427 100644
--- a/.github/workflows/codecov.yml
+++ b/.github/workflows/codecov.yml
@@ -12,7 +12,7 @@ jobs:
     - uses: actions/checkout@v3
 
     - name: Setup Micromamba
-      uses: mamba-org/provision-with-micromamba@v15
+      uses: mamba-org/provision-with-micromamba@v16
       with:
         environment-file: false
 
diff --git a/.github/workflows/default-tests.yml b/.github/workflows/default-tests.yml
index c3b7c20e..4889f8a8 100644
--- a/.github/workflows/default-tests.yml
+++ b/.github/workflows/default-tests.yml
@@ -17,7 +17,7 @@ jobs:
     - uses: actions/checkout@v3
 
     - name: Setup Micromamba
-      uses: mamba-org/provision-with-micromamba@v15
+      uses: mamba-org/provision-with-micromamba@v16
       with:
         environment-file: false
 
diff --git a/.github/workflows/deploy-docs.yml b/.github/workflows/deploy-docs.yml
index 2936f8db..1f894886 100644
--- a/.github/workflows/deploy-docs.yml
+++ b/.github/workflows/deploy-docs.yml
@@ -19,7 +19,7 @@ jobs:
         fetch-depth: 0
 
     - name: Setup Mamba
-      uses: mamba-org/provision-with-micromamba@v15
+      uses: mamba-org/provision-with-micromamba@v16
       with:
         environment-file: false
 
diff --git a/.github/workflows/integration-tests.yml b/.github/workflows/integration-tests.yml
index 63499878..89ccc00b 100644
--- a/.github/workflows/integration-tests.yml
+++ b/.github/workflows/integration-tests.yml
@@ -12,7 +12,7 @@ jobs:
     - uses: actions/checkout@v3
 
     - name: Setup Micromamba
-      uses: mamba-org/provision-with-micromamba@v15
+      uses: mamba-org/provision-with-micromamba@v16
       with:
         environment-file: false
 

From dbe95ad37782d7db870e1121149ca7f6c6ee2cfb Mon Sep 17 00:00:00 2001
From: Filipe Fernandes
Date: Thu, 25 May 2023 12:20:36 -0300
Subject: [PATCH 05/49] redundant

---
 .github/workflows/cc-checker-ugrid-test.yml | 34 ---------------------
 1 file changed, 34 deletions(-)
 delete mode 100644 .github/workflows/cc-checker-ugrid-test.yml

diff --git a/.github/workflows/cc-checker-ugrid-test.yml b/.github/workflows/cc-checker-ugrid-test.yml
deleted file mode 100644
index a8365165..00000000
--- a/.github/workflows/cc-checker-ugrid-test.yml
+++ /dev/null
@@ -1,34 +0,0 @@
-name: UGRID Plugin Tests
-
-on:
-  pull_request:
-  push:
-
-jobs:
-  run:
-    runs-on: ubuntu-latest
-
-    steps:
-    - uses: actions/checkout@v3
-
-    - name: Setup Micromamba
-      uses: mamba-org/provision-with-micromamba@v16
-      with:
-        environment-file: false
-
-    - name: Setup Env
-      shell: bash -l {0}
-      run: >
-        micromamba create --name TEST python=3 pip --file requirements.txt --file test_requirements.txt --channel conda-forge
-        && micromamba activate TEST
-        && pip install -e . --no-deps --force-reinstall
-
-    - name: cc-plugin-glider tests
-      shell: bash -l {0}
-      run: >
-        micromamba activate TEST
-        && git clone https://github.com/ioos/cc-checker-ugrid.git
-        && cd cc-checker-ugrid
-        && micromamba install --file requirements.txt --file requirements-dev.txt --channel conda-forge
-        && pip install -e . --no-deps --force-reinstall
-        && pytest -s -rxs -v cc_plugin_ugrid

From 620cbb838c90df1e970572772fa8c033a85a8b53 Mon Sep 17 00:00:00 2001
From: Filipe Fernandes
Date: Thu, 25 May 2023 12:40:06 -0300
Subject: [PATCH 06/49] provision-with-micromamba is deprecated

---
 .github/workflows/cc-plugin-glider-test.yml | 25 ++++++++++---------
 .github/workflows/cc-plugin-sgrid-test.yml  | 27 ++++++++++++---------
 .github/workflows/cc-plugin-ugrid-test.yml  | 27 ++++++++++++---------
 .github/workflows/codecov.yml               | 21 +++++++++-------
 .github/workflows/default-tests.yml         | 24 +++++++++---------
 .github/workflows/deploy-docs.yml           | 19 +++++++++------
 .github/workflows/integration-tests.yml     | 22 +++++++++--------
 7 files changed, 92 insertions(+), 73 deletions(-)

diff --git a/.github/workflows/cc-plugin-glider-test.yml b/.github/workflows/cc-plugin-glider-test.yml
index f43ed6d9..037ff32b 100644
--- a/.github/workflows/cc-plugin-glider-test.yml
+++ b/.github/workflows/cc-plugin-glider-test.yml
@@ -12,23 +12,26 @@ jobs:
     - uses: actions/checkout@v3
 
     - name: Setup Micromamba
-      uses: mamba-org/provision-with-micromamba@v16
+      uses: mamba-org/setup-micromamba@v1
       with:
-        environment-file: false
+        environment-name: TEST
+        init-shell: bash
+        create-args: >-
+          python=3 pip
+          --file requirements.txt
+          --file test_requirements.txt
+          --channel conda-forge
 
-    - name: Setup Env
+    - name: Install compliance-checker
       shell: bash -l {0}
-      run: >
-        micromamba create --name TEST python=3 pip --file requirements.txt --file test_requirements.txt --channel conda-forge
-        && micromamba activate TEST
-        && pip install -e . --no-deps --force-reinstall
+      run: |
+        python -m pip install -v -e . --no-deps --force-reinstall
 
     - name: cc-plugin-glider tests
       shell: bash -l {0}
       run: >
-        micromamba activate TEST
-        && git clone https://github.com/ioos/cc-plugin-glider.git
+        git clone https://github.com/ioos/cc-plugin-glider.git
         && cd cc-plugin-glider
         && micromamba install --file requirements.txt --file requirements-dev.txt --channel conda-forge
-        && pip install -e . --no-deps --force-reinstall
-        && pytest -s -rxs -v cc_plugin_glider
+        && python -m pip install -e . --no-deps --force-reinstall
+        && python -m pytest -s -rxs -v cc_plugin_glider
diff --git a/.github/workflows/cc-plugin-sgrid-test.yml b/.github/workflows/cc-plugin-sgrid-test.yml
index 560d7121..fec9a182 100644
--- a/.github/workflows/cc-plugin-sgrid-test.yml
+++ b/.github/workflows/cc-plugin-sgrid-test.yml
@@ -12,22 +12,25 @@ jobs:
     - uses: actions/checkout@v3
 
     - name: Setup Micromamba
-      uses: mamba-org/provision-with-micromamba@v16
+      uses: mamba-org/setup-micromamba@v1
       with:
-        environment-file: false
+        environment-name: TEST
+        init-shell: bash
+        create-args: >-
+          python=3 pip
+          --file requirements.txt
+          --file test_requirements.txt
+          --channel conda-forge
 
-    - name: Setup Env
+    - name: Install compliance-checker
       shell: bash -l {0}
-      run: >
-        micromamba create --name TEST python=3 pip --file requirements.txt --file test_requirements.txt --channel conda-forge
-        && micromamba activate TEST
-        && pip install -e . --no-deps --force-reinstall
+      run: |
+        python -m pip install -e . --no-deps --force-reinstall
 
-    - name: cc-plugin-glider tests
+    - name: cc-plugin-sgrid tests
       shell: bash -l {0}
       run: >
-        micromamba activate TEST
-        && git clone https://github.com/ioos/cc-plugin-sgrid.git
+        git clone https://github.com/ioos/cc-plugin-sgrid.git
         && cd cc-plugin-sgrid
-        && pip install -e . --no-deps --force-reinstall
-        && pytest -s -rxs -v cc_plugin_sgrid
+        && python -m pip install -e . --no-deps --force-reinstall
+        && python -m pytest -s -rxs -v cc_plugin_sgrid
diff --git a/.github/workflows/cc-plugin-ugrid-test.yml b/.github/workflows/cc-plugin-ugrid-test.yml
index a8365165..92d346df 100644
--- a/.github/workflows/cc-plugin-ugrid-test.yml
+++ b/.github/workflows/cc-plugin-ugrid-test.yml
@@ -12,23 +12,26 @@ jobs:
     - uses: actions/checkout@v3
 
     - name: Setup Micromamba
-      uses: mamba-org/provision-with-micromamba@v16
+      uses: mamba-org/setup-micromamba@v1
       with:
-        environment-file: false
+        environment-name: TEST
+        init-shell: bash
+        create-args: >-
+          python=3 pip
+          --file requirements.txt
+          --file test_requirements.txt
+          --channel conda-forge
 
-    - name: Setup Env
+    - name: Install compliance-checker
       shell: bash -l {0}
-      run: >
-        micromamba create --name TEST python=3 pip --file requirements.txt --file test_requirements.txt --channel conda-forge
-        && micromamba activate TEST
-        && pip install -e . --no-deps --force-reinstall
+      run: |
+        python -m pip install -e . --no-deps --force-reinstall
 
-    - name: cc-plugin-glider tests
+    - name: cc-plugin-ugrid tests
       shell: bash -l {0}
       run: >
-        micromamba activate TEST
-        && git clone https://github.com/ioos/cc-checker-ugrid.git
+        git clone https://github.com/ioos/cc-checker-ugrid.git
         && cd cc-checker-ugrid
         && micromamba install --file requirements.txt --file requirements-dev.txt --channel conda-forge
+        && python -m pip install -e . --no-deps --force-reinstall
+        && python -m pytest -s -rxs -v cc_plugin_ugrid
diff --git a/.github/workflows/codecov.yml b/.github/workflows/codecov.yml
index 63858427..55db7a0a 100644
--- a/.github/workflows/codecov.yml
+++ b/.github/workflows/codecov.yml
@@ -12,22 +12,25 @@ jobs:
     - uses: actions/checkout@v3
 
     - name: Setup Micromamba
-      uses: mamba-org/provision-with-micromamba@v16
+      uses: mamba-org/setup-micromamba@v1
       with:
-        environment-file: false
+        environment-name: TEST
+        init-shell: bash
+        create-args: >-
+          python=3 pip
+          --file requirements.txt
+          --file test_requirements.txt
+          --channel conda-forge
 
-    - name: Setup Env
+    - name: Install compliance-checker
       shell: bash -l {0}
-      run: >
-        micromamba create --name TEST python=3 pip --file requirements.txt --file test_requirements.txt --channel conda-forge
-        && micromamba activate TEST
-        && pip install -e . --no-deps --force-reinstall
+      run: |
+        python -m pip install -e . --no-deps --force-reinstall
 
     - name: Run tests with coverage
       shell: bash -l {0}
       run: |
-        micromamba activate TEST
-        pytest --cov=compliance_checker --cov-report=xml compliance_checker/tests
+        python -m pytest --cov=compliance_checker --cov-report=xml compliance_checker/tests
       # pass this step even if there are individual test failures, we are
       # interested in the overall level of coverage and other checks can
       # report on test failures.
diff --git a/.github/workflows/default-tests.yml b/.github/workflows/default-tests.yml
index 4889f8a8..55e0e823 100644
--- a/.github/workflows/default-tests.yml
+++ b/.github/workflows/default-tests.yml
@@ -16,20 +16,22 @@ jobs:
     steps:
     - uses: actions/checkout@v3
 
-    - name: Setup Micromamba
-      uses: mamba-org/provision-with-micromamba@v16
+    - name: Setup Micromamba ${{ matrix.python-version }}
+      uses: mamba-org/setup-micromamba@v1
       with:
-        environment-file: false
+        environment-name: TEST
+        init-shell: bash
+        create-args: >-
+          python=${{ matrix.python-version }} pip
+          --file requirements.txt
+          --file test_requirements.txt
+          --channel conda-forge
 
-    - name: Setup Env ${{ matrix.python-version }}
+    - name: Install compliance-checker
       shell: bash -l {0}
-      run: >
-        micromamba create --name TEST python=${{ matrix.python-version }} pip --file requirements.txt --file test_requirements.txt --channel conda-forge
-        && micromamba activate TEST
-        && pip install -e . --no-deps --force-reinstall
+      run: |
+        python -m pip install -e . --no-deps --force-reinstall
 
     - name: Default Tests
       shell: bash -l {0}
-      run: |
-        micromamba activate TEST
-        pytest -s -rxs -v -k "not integration" compliance_checker
+      run: python -m pytest -s -rxs -v -k "not integration" compliance_checker
diff --git a/.github/workflows/deploy-docs.yml b/.github/workflows/deploy-docs.yml
index 1f894886..3b22fe7c 100644
--- a/.github/workflows/deploy-docs.yml
+++ b/.github/workflows/deploy-docs.yml
@@ -18,23 +18,26 @@ jobs:
       with:
         fetch-depth: 0
 
-    - name: Setup Mamba
-      uses: mamba-org/provision-with-micromamba@v16
+    - name: Setup Micromamba
+      uses: mamba-org/setup-micromamba@v1
       with:
-        environment-file: false
-
-    - name: Build environment
+        environment-name: TEST
+        init-shell: bash
+        create-args: >-
+          python=3 pip
+          --file requirements.txt
+          --file test_requirements.txt
+          --channel conda-forge
+
+    - name: Install compliance-checker
       shell: bash -l {0}
       run: |
-        micromamba create --name TEST python=3 --file requirements.txt --file test_requirements.txt --channel conda-forge
-        micromamba activate TEST
         python -m pip install -e . --no-deps --force-reinstall
 
     - name: Build documentation
       shell: bash -l {0}
       run: |
         set -e
-        micromamba activate TEST
         pushd docs
         cp ../README.md source/quickintro.md
         make clean html linkcheck
diff --git a/.github/workflows/integration-tests.yml b/.github/workflows/integration-tests.yml
index 89ccc00b..ecf49146 100644
--- a/.github/workflows/integration-tests.yml
+++ b/.github/workflows/integration-tests.yml
@@ -12,19 +12,21 @@ jobs:
     - uses: actions/checkout@v3
 
     - name: Setup Micromamba
-      uses: mamba-org/provision-with-micromamba@v16
+      uses: mamba-org/setup-micromamba@v1
       with:
-        environment-file: false
+        environment-name: TEST
+        init-shell: bash
+        create-args: >-
+          python=3 pip
+          --file requirements.txt
+          --file test_requirements.txt
+          --channel conda-forge
 
-    - name: Setup Env
+    - name: Install compliance-checker
       shell: bash -l {0}
-      run: >
-        micromamba create --name TEST python=3 pip --file requirements.txt --file test_requirements.txt --channel conda-forge
-        && micromamba activate TEST
-        && pip install -e . --no-deps --force-reinstall
+      run: |
+        python -m pip install -e . --no-deps --force-reinstall
 
     - name: Integration Tests
       shell: bash -l {0}
-      run: |
-        micromamba activate TEST
-        pytest -m "integration" -s -rxs -v --vcr-record=none compliance_checker
+      run: python -m pytest -m "integration" -s -rxs -v --vcr-record=none compliance_checker

From dee46d161f8206719458cdc521e7702b3d9cca12 Mon Sep 17 00:00:00 2001
From: Benjamin Adams
Date: Thu, 25 May 2023 16:27:27 -0400
Subject: [PATCH 07/49] Pin isodate >= 0.6.1 in requirements.txt

Some builds were failing on isodate requirements due to newer setuptools
not having 2to3 support.  Newer builds of isodate shouldn't have this
issue.
---
 requirements.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/requirements.txt b/requirements.txt
index 6c3dc9de..47b9921c 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,6 +1,6 @@
 cf-units>=2
 cftime>=1.1.0
-isodate>=0.5.4
+isodate>=0.6.1
 jinja2>=2.7.3
 lxml>=3.2.1
 netcdf4>=1.5.7

From 445c86a9fe496142bf547137394c08948b81fd0d Mon Sep 17 00:00:00 2001
From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Date: Tue, 6 Jun 2023 00:37:46 +0000
Subject: [PATCH 08/49] [pre-commit.ci] pre-commit autoupdate
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

updates:
- [github.com/charliermarsh/ruff-pre-commit: v0.0.267 → v0.0.270](https://github.com/charliermarsh/ruff-pre-commit/compare/v0.0.267...v0.0.270)
---
 .pre-commit-config.yaml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index eea05f7e..6a81071d 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -31,7 +31,7 @@ repos:
 
 
 - repo: https://github.com/charliermarsh/ruff-pre-commit
-  rev: v0.0.267
+  rev: v0.0.270
   hooks:
   - id: ruff

From e8246f95c0e51b3f8ba3593bee676cbe205b27ae Mon Sep 17 00:00:00 2001
From: Filipe Fernandes
Date: Thu, 18 May 2023 13:34:32 -0300
Subject: [PATCH 09/49] remove MemoizedDataset

---
 compliance_checker/__init__.py  | 16 ----------------
 compliance_checker/base.py      |  4 ++--
 compliance_checker/cf/cf_1_8.py |  3 +--
 compliance_checker/suite.py     |  8 ++++----
 pyproject.toml                  |  1 -
 5 files changed, 7 insertions(+), 25 deletions(-)

diff --git a/compliance_checker/__init__.py b/compliance_checker/__init__.py
index 783dd071..f5975758 100644
--- a/compliance_checker/__init__.py
+++ b/compliance_checker/__init__.py
@@ -1,29 +1,13 @@
 from contextlib import contextmanager
-from functools import lru_cache
 from tempfile import NamedTemporaryFile
 from typing import BinaryIO, Generator
 
-from netCDF4 import Dataset
-
 try:
     from ._version import __version__
 except ImportError:
     __version__ = "unknown"
 
 
-class MemoizedDataset(Dataset):
-    """
-    A NetCDF dataset which has its get_variables_by_attributes call memoized in
-    order to speed up repeated calls to the function. This should only really
-    be used against netCDF Datasets opened in 'r' mode, as the attributes should
-    not change upon reading the files.
-    """
-
-    @lru_cache(128)
-    def get_variables_by_attributes(self, **kwargs):
-        return super().get_variables_by_attributes(**kwargs)
-
-
 @contextmanager
 def tempnc(data: BinaryIO) -> Generator[str, None, None]:
     """
diff --git a/compliance_checker/base.py b/compliance_checker/base.py
index 34981221..347bee7d 100644
--- a/compliance_checker/base.py
+++ b/compliance_checker/base.py
@@ -20,7 +20,7 @@ from owslib.swe.sensor.sml import SensorML
 
 import compliance_checker.cfutil as cfutil
-from compliance_checker import MemoizedDataset, __version__
+from compliance_checker import __version__
 from compliance_checker.util import kvp_convert
 
 # Python 3.5+ should work, also have a fallback
@@ -201,7 +201,7 @@ class BaseNCCheck:
     Base Class for NetCDF Dataset supporting Check Suites.
     """
 
-    supported_ds = {Dataset, MemoizedDataset}
+    supported_ds = [Dataset]
 
     @classmethod
     def std_check_in(cls, dataset, name, allowed_vals):
diff --git a/compliance_checker/cf/cf_1_8.py b/compliance_checker/cf/cf_1_8.py
index 3c1f5d79..0f001a8c 100644
--- a/compliance_checker/cf/cf_1_8.py
+++ b/compliance_checker/cf/cf_1_8.py
@@ -20,7 +20,6 @@ from netCDF4 import Dataset
 from shapely.geometry import Polygon
 
-from compliance_checker import MemoizedDataset
 from compliance_checker.base import BaseCheck, TestCtx
 from compliance_checker.cf.cf_1_7 import CF1_7Check
 from compliance_checker.cf.util import reference_attr_variables, string_from_var_type
@@ -46,7 +45,7 @@ def __init__(self, options=None):
             },
         )
 
-    def check_groups(self, ds: MemoizedDataset):
+    def check_groups(self, ds: Dataset):
         """
         2.7.2. Application of attributes
 
diff --git a/compliance_checker/suite.py b/compliance_checker/suite.py
index 3eb6ecad..7f435208 100644
--- a/compliance_checker/suite.py
+++ b/compliance_checker/suite.py
@@ -25,7 +25,7 @@ from owslib.swe.sensor.sml import SensorML
 from pkg_resources import working_set
 
-from compliance_checker import MemoizedDataset, __version__, tempnc
+from compliance_checker import __version__, tempnc
 from compliance_checker.base import BaseCheck, GenericFile, Result, fix_return_value
 from compliance_checker.protocols import cdl, netcdf, opendap
 
@@ -828,7 +828,7 @@ def check_remote_netcdf(self, ds_str):
         if netcdf.is_remote_netcdf(ds_str):
             response = requests.get(ds_str, allow_redirects=True, timeout=60)
             try:
-                return MemoizedDataset(
+                return Dataset(
                     urlparse(response.url).path,
                     memory=response.content,
                 )
@@ -836,7 +836,7 @@ def check_remote_netcdf(self, ds_str):
                 # handle case when netCDF C libs weren't compiled with
                 # in-memory support by using tempfile
                 with tempnc(response.content) as _nc:
-                    return MemoizedDataset(_nc)
+                    return Dataset(_nc)
 
     def load_remote_dataset(self, ds_str):
         """
@@ -893,7 +893,7 @@ def load_local_dataset(self, ds_str):
             ds_str = self.generate_dataset(ds_str)
 
         if netcdf.is_netcdf(ds_str):
-            return MemoizedDataset(ds_str)
+            return Dataset(ds_str)
 
         # Assume this is just a Generic File if it exists
         if os.path.isfile(ds_str):
diff --git a/pyproject.toml b/pyproject.toml
index 63198262..2cd038aa 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -39,5 +39,4 @@ ignore = [
     "E402",
     "A001",
 ]
-"compliance_checker/__init__.py" = ["B019"]
 "compliance_checker/cfutil.py" = ["B028"]

From 29378c4b64dbc50bbf1c2ca45e753e1b33cceff8 Mon Sep 17 00:00:00 2001
From: Filipe Fernandes
Date: Tue, 6 Jun 2023 11:35:13 -0300
Subject: [PATCH 10/49] restrict to the upstream feat ver

---
 requirements.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/requirements.txt b/requirements.txt
index 47b9921c..3df7b0d9 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -3,7 +3,7 @@ cftime>=1.1.0
 isodate>=0.6.1
 jinja2>=2.7.3
 lxml>=3.2.1
-netcdf4>=1.5.7
+netcdf4>=1.6.4
 owsLib>=0.8.3
 pendulum>=1.2.4
 pygeoif>=0.6

From 4d66c02fc04d751688559f5a08fccc3e7d7a65f0 Mon Sep 17 00:00:00 2001
From: Filipe Fernandes
Date: Tue, 6 Jun 2023 11:35:25 -0300
Subject: [PATCH 11/49] update min python

---
 setup.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/setup.py b/setup.py
index 00b4877c..e26a432b 100644
--- a/setup.py
+++ b/setup.py
@@ -29,7 +29,7 @@ def pip_requirements(fname="requirements.txt"):
     url="https://github.com/ioos/compliance-checker",
     packages=find_packages(),
     install_requires=pip_requirements(),
-    python_requires="~=3.5",
+    python_requires="~=3.7",
     tests_require=["pytest"],
     classifiers=[
         "Development Status :: 5 - Production/Stable",

From f6a01dc3355f3954d04a31aafc3058ad9915cf1d Mon Sep 17 00:00:00 2001
From: Filipe Fernandes
Date: Fri, 7 Jul 2023 10:01:52 -0300
Subject: [PATCH 12/49] update pre-commits

---
 .pre-commit-config.yaml | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 6a81071d..8d349177 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -25,23 +25,23 @@ repos:
   - id: black
     language_version: python3
 
 - repo: https://github.com/asottile/add-trailing-comma
-  rev: v2.4.0
+  rev: v3.0.0
   hooks:
   - id: add-trailing-comma
 
 
 - repo: https://github.com/charliermarsh/ruff-pre-commit
-  rev: v0.0.270
+  rev: v0.0.277
   hooks:
   - id: ruff
 
 - repo: https://github.com/tox-dev/pyproject-fmt
-  rev: 0.11.2
+  rev: 0.13.0
   hooks:
   - id: pyproject-fmt
 
 - repo: https://github.com/codespell-project/codespell
-  rev: v2.2.4
+  rev: v2.2.5
   hooks:
   - id: codespell
     args:

From 29d67a5279246635b52b02dac4c425b778d4c5a1 Mon Sep 17 00:00:00 2001
From: Filipe Fernandes
Date: Fri, 7 Jul 2023 10:02:16 -0300
Subject: [PATCH 13/49] remove dups in sets

---
 compliance_checker/cf/util.py          | 2 --
 compliance_checker/tests/test_suite.py | 1 -
 2 files changed, 3 deletions(-)

diff --git a/compliance_checker/cf/util.py b/compliance_checker/cf/util.py
index d033cd52..d57f7d86 100644
--- a/compliance_checker/cf/util.py
+++ b/compliance_checker/cf/util.py
@@ -73,7 +73,6 @@
     "xlon",
     "XLON",
     "lonx",
-    "lonx",
     "lon_u",
     "LON_U",
     "lon_v",
@@ -97,7 +96,6 @@
     "ylat",
     "YLAT",
     "laty",
-    "laty",
     "lat_u",
     "LAT_U",
     "lat_v",
diff --git a/compliance_checker/tests/test_suite.py b/compliance_checker/tests/test_suite.py
index 1708a8d9..54f49a95 100644
--- a/compliance_checker/tests/test_suite.py
+++ b/compliance_checker/tests/test_suite.py
@@ -146,7 +146,6 @@ def test_skip_check_level(self):
             "§3.5 flag_meanings for lat",
             "§3.5 flag_meanings for lon",
             "§3.5 lat is a valid flags variable",
-            "§3.5 lat is a valid flags variable",
             "§3.5 lon is a valid flags variable",
         }
 

From 6370ff69f71ab13471ab7838cf91445b07d69e9f Mon Sep 17 00:00:00 2001
From: Filipe Fernandes
Date: Fri, 7 Jul 2023 10:02:25 -0300
Subject: [PATCH 14/49] add a skip for dups we want to keep

---
 pyproject.toml | 13 +++++++------
 1 file changed, 7 insertions(+), 6 deletions(-)

diff --git a/pyproject.toml b/pyproject.toml
index 2cd038aa..af8b5817 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -6,12 +6,6 @@ requires = [
     "wheel",
 ]
 
-[tool.pytest.ini_options]
-markers = [
-    "integration: marks integration tests (deselect with '-m \"not integration\"')",
-    "slowtest: marks slow tests (deselect with '-m \"not slowtest\"')"
-]
-
 [tool.ruff]
 select = [
     "A", # flake8-builtins
@@ -39,4 +33,11 @@ ignore = [
     "E402",
     "A001",
 ]
 "compliance_checker/cfutil.py" = ["B028"]
+"compliance_checker/cf/appendix_f.py" = ["B033"] # ignore duplicates items in the set
+
+[tool.pytest.ini_options]
+markers = [
+    "integration: marks integration tests (deselect with '-m \"not integration\"')",
+    "slowtest: marks slow tests (deselect with '-m \"not slowtest\"')"
+]

From dd9808cc365d1fa9b28c948fac65081a83ad8bdb Mon Sep 17 00:00:00 2001
From: Filipe Fernandes
Date: Tue, 5 Sep 2023 14:43:03 -0300
Subject: [PATCH 15/49] let's skip this one for now

---
 docs/source/conf.py | 5 +++++
 1 file changed, 5 insertions(+)

diff --git a/docs/source/conf.py b/docs/source/conf.py
index 8ca29f8c..ee5d388c 100644
--- a/docs/source/conf.py
+++ b/docs/source/conf.py
@@ -165,3 +165,8 @@
         "Miscellaneous",
     ),
 ]
+
+linkcheck_ignore = [
+    # TODO: check again in the future
+    r"https://mmisw.org/ont/ioos/platform",  # 2023-09-05 site non-responsive
+]

From b7df6bd7e5ccc8535e948b7d2133d60e9d8a0e24 Mon Sep 17 00:00:00 2001
From: Filipe Fernandes
Date: Tue, 1 Aug 2023 10:02:53 -0300
Subject: [PATCH 16/49] update

---
 .pre-commit-config.yaml | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 8d349177..0004e6ef 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -19,19 +19,19 @@ repos:
       - test_requirements.txt
 
 - repo: https://github.com/psf/black
-  rev: 23.3.0
+  rev: 23.7.0
   hooks:
   - id: black
     language_version: python3
 
 - repo: https://github.com/asottile/add-trailing-comma
-  rev: v3.0.0
+  rev: v3.0.1
   hooks:
  - id: add-trailing-comma
 
 - repo: https://github.com/charliermarsh/ruff-pre-commit
-  rev: v0.0.277
+  rev: v0.0.281
   hooks:
   - id: ruff

From 606110af0f8f9f0044433c3543533cccb38794a0 Mon Sep 17 00:00:00 2001
From: Filipe Fernandes
Date: Tue, 1 Aug 2023 10:03:02 -0300
Subject: [PATCH 17/49] fix .format calls

---
 cchecker.py                                        |   8 +-
 compliance_checker/acdd.py                         |   4 +-
 compliance_checker/base.py                         |   5 +-
 compliance_checker/cf/cf_1_6.py                    | 102 ++++--------------
 compliance_checker/cf/cf_1_7.py                    |  52 ++-------
 compliance_checker/cf/cf_base.py                   |  43 ++------
 compliance_checker/cf/util.py                      |   9 +-
 compliance_checker/ioos.py                         |  39 ++-----
 compliance_checker/runner.py                       |  12 +--
 compliance_checker/suite.py                        |   6 +-
 compliance_checker/tests/test_cf.py                |   6 +-
 compliance_checker/tests/test_cf_integration.py    |  32 ++----
 compliance_checker/tests/test_feature_detection.py |   8 +-
 13 files changed, 76 insertions(+), 250 deletions(-)

diff --git a/cchecker.py b/cchecker.py
index 90329cdf..72d022df 100755
--- a/cchecker.py
+++ b/cchecker.py
@@ -293,9 +293,7 @@ def main():
         if output_len == 1:
             if args.format != "json":
                 print(
-                    "Running Compliance Checker on the datasets from: {}".format(
-                        args.dataset_location,
-                    ),
+                    f"Running Compliance Checker on the datasets from: {args.dataset_location}",
                     file=sys.stderr,
                 )
             return_value, errors = ComplianceChecker.run_checker(
@@ -315,9 +313,7 @@ def main():
         for output, dataset in zip(args.output, args.dataset_location):
             if args.format != "json":
                 print(
-                    "Running Compliance Checker on the dataset from: {}".format(
-                        dataset,
-                    ),
+                    f"Running Compliance Checker on the dataset from: {dataset}",
                     file=sys.stderr,
                 )
             return_value, errors = ComplianceChecker.run_checker(
diff --git a/compliance_checker/acdd.py b/compliance_checker/acdd.py
index 6502f45d..6ea610ac 100644
--- a/compliance_checker/acdd.py
+++ b/compliance_checker/acdd.py
@@ -676,9 +676,7 @@ def verify_convention_version(self, ds):
             return ratable_result((1, 2), "Global Attributes", m)
         except AttributeError:  # NetCDF attribute not found
             m = [
-                "No Conventions attribute present; must contain ACDD-{}".format(
-                    self._cc_spec_version,
-                ),
+                f"No Conventions attribute present; must contain ACDD-{self._cc_spec_version}",
             ]
             # Result will have name "Global Attributes" to group with globals
             return ratable_result((0, 2), "Global Attributes", m)
diff --git a/compliance_checker/base.py b/compliance_checker/base.py
index 347bee7d..c2674e85 100644
--- a/compliance_checker/base.py
+++ b/compliance_checker/base.py
@@ -480,10 +480,7 @@ def attr_check(kvp, ds, priority, ret_val, gname=None, var_name=None):
             msgs.append(f"{display_name} not present")
         elif res == 1:
             msgs.append(
-                "{} present, but not in expected value list ({})".format(
-                    display_name,
-                    sorted(other),
-                ),
+                f"{display_name} present, but not in expected value list ({sorted(other)})",
             )
 
     ret_val.append(
diff --git a/compliance_checker/cf/cf_1_6.py b/compliance_checker/cf/cf_1_6.py
index 04ec8962..2cc5a74d 100644
--- a/compliance_checker/cf/cf_1_6.py
+++ b/compliance_checker/cf/cf_1_6.py
@@ -88,10 +88,7 @@ def check_data_types(self, ds):
                 and v.dtype.type not in self._allowed_numeric_var_types
             ):
                 fails.append(
-                    "The variable {} failed because the datatype is {}".format(
-                        k,
-                        v.datatype,
-                    ),
+                    f"The variable {k} failed because the datatype is {v.datatype}",
                 )
         return Result(
             BaseCheck.HIGH,
@@ -423,9 +420,7 @@ def check_fill_value_equal_missing_value(self, ds):
                 total = total + 1
                 if variable._FillValue != variable.missing_value:
                     fails.append(
-                        "For the variable {} the missing_value must be equal to the _FillValue".format(
-                            variable.name,
-                        ),
+                        f"For the variable {variable.name} the missing_value must be equal to the _FillValue",
                     )
 
         return Result(
@@ -496,10 +491,7 @@ def check_fill_value_outside_valid_range(self, ds):
                 m = "§2.5.1 Fill Values should be outside the range specified by valid_range"  # subsection message
                 valid_fill_range.assert_true(
                     False,
-                    "{};\n\t{}:valid_range must be a numeric type not a string".format(
-                        m,
-                        name,
-                    ),
+                    f"{m};\n\t{name}:valid_range must be a numeric type not a string",
                 )
                 continue
             rmin, rmax = variable.valid_range
@@ -720,10 +712,7 @@ def check_units(self, ds):
             # side effects, but better than teasing out the individual result
             if units is not None and units_attr_is_string.assert_true(
                 isinstance(units, str),
-                "units ({}) attribute of '{}' must be a string compatible with UDUNITS".format(
-                    units,
-                    variable.name,
-                ),
+                f"units ({units}) attribute of '{variable.name}' must be a string compatible with UDUNITS",
             ):
                 valid_udunits = self._check_valid_udunits(ds, name)
                 ret_val.append(valid_udunits)
@@ -808,9 +797,7 @@ def _check_valid_cf_units(self, ds, variable_name):
         # 1) Units must exist
        valid_units.assert_true(
            should_be_dimensionless or units is not None,
-            "units attribute is required for {} when variable is not a dimensionless quantity".format(
-                variable_name,
-            ),
+            f"units attribute is required for {variable_name} when variable is not a dimensionless quantity",
        )
 
         # Don't bother checking the rest
@@ -875,10 +862,7 @@ def _check_valid_udunits(self, ds, variable_name):
         are_udunits = units is not None and util.units_known(units)
         valid_udunits.assert_true(
             should_be_dimensionless or are_udunits or units is None,
-            'units for {}, "{}" are not recognized by UDUNITS'.format(
-                variable_name,
-                units,
-            ),
+            f'units for {variable_name}, "{units}" are not recognized by UDUNITS',
         )
 
         return valid_udunits.to_result()
@@ -1039,9 +1023,7 @@ def check_standard_name(self, ds):
             valid_std_name = TestCtx(BaseCheck.HIGH, self.section_titles["3.3"])
             valid_std_name.assert_true(
                 isinstance(standard_name, str),
-                "Attribute standard_name for variable {} must be a string".format(
-                    name,
-                ),
+                f"Attribute standard_name for variable {name} must be a string",
             )
             valid_std_name.out_of += 1
             if standard_name not in self._std_names:
@@ -1080,9 +1062,7 @@ def check_standard_name(self, ds):
             # IMPLEMENTATION CONFORMANCE 3 RECOMMENDED
             long_or_std_name.assert_true(
                 long_name_present or standard_name_present,
-                "Attribute long_name or/and standard_name is highly recommended for variable {}".format(
-                    name,
-                ),
+                f"Attribute long_name or/and standard_name is highly recommended for variable {name}",
             )
             ret_val.append(long_or_std_name.to_result())
         return ret_val
@@ -1209,9 +1189,7 @@ def check_flags(self, ds):
                 allvr = Result(BaseCheck.MEDIUM, allv, self.section_titles["3.5"])
                 if not allvr.value:
                     allvr.msgs = [
-                        "flag masks and flag values for '{}' combined don't equal flag values".format(
-                            name,
-                        ),
+                        f"flag masks and flag values for '{name}' combined don't equal flag values",
                     ]
 
                 ret_val.append(allvr)
@@ -1367,9 +1345,7 @@ def _check_flag_meanings(self, ds, name):
             if flag_regx.match(meaning) is None:
                 valid_meanings.assert_true(
                     False,
-                    "{}'s flag_meanings attribute defined an illegal flag meaning ".format(
-                        name,
-                    )
+                    f"{name}'s flag_meanings attribute defined an illegal flag meaning "
                     + f"{meaning}",
                 )
         return valid_meanings.to_result()
@@ -2761,10 +2737,7 @@ def check_cell_boundaries(self, ds):
             if boundary_variable_name not in ds.variables:
                 valid = False
                 reasoning.append(
"Boundary variable {} referenced by {} not ".format( - boundary_variable_name, - variable.name, - ) + f"Boundary variable {boundary_variable_name} referenced by {variable.name} not " + "found in dataset variables", ) else: @@ -2774,10 +2747,7 @@ def check_cell_boundaries(self, ds): if boundary_variable.ndim < 2: valid = False reasoning.append( - "Boundary variable {} specified by {}".format( - boundary_variable.name, - variable.name, - ) + f"Boundary variable {boundary_variable.name} specified by {variable.name}" + " should have at least two dimensions to enclose the base " + "case of a one dimensionsal variable", ) @@ -3073,10 +3043,7 @@ def _check_cell_methods_paren_info(self, paren_contents, var): # attempt to get the number for the interval if not interval_matches: valid_info.messages.append( - '§7.3.3 {}:cell_methods contains an interval specification that does not parse: "{}". Should be in format "interval: "'.format( - var.name, - val, - ), + f'§7.3.3 {var.name}:cell_methods contains an interval specification that does not parse: "{val}". Should be in format "interval: "', ) else: try: @@ -3111,17 +3078,13 @@ def _check_cell_methods_paren_info(self, paren_contents, var): valid_info.out_of += 1 if len(pmatches) == 1: valid_info.messages.append( - "§7.3.3 If there is no standardized information, the keyword comment: should be omitted for variable {}".format( - var.name, - ), + f"§7.3.3 If there is no standardized information, the keyword comment: should be omitted for variable {var.name}", ) # otherwise check that the comment is the last # item in the parentheses elif i != len(pmatches) - 1: valid_info.messages.append( - '§7.3.3 The non-standard "comment:" element must come after any standard elements in cell_methods for variable {}'.format( - var.name, - ), + f'§7.3.3 The non-standard "comment:" element must come after any standard elements in cell_methods for variable {var.name}', ) # else: @@ -3129,20 +3092,14 @@ def _check_cell_methods_paren_info(self, paren_contents, var): else: valid_info.out_of += 1 valid_info.messages.append( - '§7.3.3 Invalid cell_methods keyword "{}" for variable {}. Must be one of [interval, comment]'.format( - keyword, - var.name, - ), + f'§7.3.3 Invalid cell_methods keyword "{keyword}" for variable {var.name}. Must be one of [interval, comment]', ) # Ensure concatenated reconstructed matches are the same as the # original string. If they're not, there's likely a formatting error valid_info.assert_true( "".join(m.group(0) for m in pmatches) == paren_contents, - "§7.3.3 Parenthetical content inside {}:cell_methods is not well formed: {}".format( - var.name, - paren_contents, - ), + f"§7.3.3 Parenthetical content inside {var.name}:cell_methods is not well formed: {paren_contents}", ) return valid_info @@ -3281,9 +3238,7 @@ def check_climatological_statistics(self, ds): ): total_climate_count += 1 reasoning.append( - "Climatology variable coordinates are in improper order: {}. Bounds-specific dimensions should be last".format( - ds.variables[clim_coord_var.climatology].dimensions, - ), + f"Climatology variable coordinates are in improper order: {ds.variables[clim_coord_var.climatology].dimensions}. 
+                        f"Climatology variable coordinates are in improper order: {ds.variables[clim_coord_var.climatology].dimensions}. Bounds-specific dimensions should be last",
                    )
                     result = Result(
                         BaseCheck.MEDIUM,
@@ -3302,9 +3257,7 @@ def check_climatological_statistics(self, ds):
                     != 2
                 ):
                     reasoning.append(
-                        'Climatology dimension "{}" should only contain two elements'.format(
-                            ds.variables[clim_coord_var.climatology].name,
-                        ),
+                        f'Climatology dimension "{ds.variables[clim_coord_var.climatology].name}" should only contain two elements',
                     )
                     total_climate_count += 1
                     result = Result(
@@ -3346,9 +3299,7 @@ def check_climatological_statistics(self, ds):
                 total_climate_count += 1
                 if not regex.search(re_string, cell_method_var.cell_methods):
                     reasoning.append(
-                        'The "time: method within years/days over years/days" format is not correct in variable {}.'.format(
-                            cell_method_var.name,
-                        ),
+                        f'The "time: method within years/days over years/days" format is not correct in variable {cell_method_var.name}.',
                    )
                 else:
                     valid_climate_count += 1
@@ -3536,9 +3487,7 @@ def check_compression_gathering(self, ds):
             if compress_var.ndim != 1:
                 valid = False
                 reasoning.append(
-                    "Compression variable {} may only have one dimension".format(
-                        compress_var.name,
-                    ),
+                    f"Compression variable {compress_var.name} may only have one dimension",
                 )
             # IMPLEMENTATION CONFORMANCE 8.2 REQUIRED 1/3
             # ensure compression variable is a proper index, and thus is an
@@ -3548,9 +3497,7 @@ def check_compression_gathering(self, ds):
             ):
                 valid = False
                 reasoning.append(
-                    "Compression variable {} must be an integer type to form a proper array index".format(
-                        compress_var.name,
-                    ),
+                    f"Compression variable {compress_var.name} must be an integer type to form a proper array index",
                 )
             # IMPLEMENTATION CONFORMANCE 8.2 REQUIRED 2/3
             # make sure all the variables referred to are contained by the
@@ -3559,10 +3506,7 @@ def check_compression_gathering(self, ds):
                 not_in_dims = sorted(compress_set.difference(ds.dimensions))
                 valid = False
                 reasoning.append(
-                    "The following dimensions referenced by the compress attribute of variable {} do not exist: {}".format(
-                        compress_var.name,
-                        not_in_dims,
-                    ),
+                    f"The following dimensions referenced by the compress attribute of variable {compress_var.name} do not exist: {not_in_dims}",
                )
             # IMPLEMENTATION CONFORMANCE 8.2 REQUIRED 3/3
             # The values of the associated coordinate variable must be in the range
diff --git a/compliance_checker/cf/cf_1_7.py b/compliance_checker/cf/cf_1_7.py
index 1ff97520..14d6430c 100644
--- a/compliance_checker/cf/cf_1_7.py
+++ b/compliance_checker/cf/cf_1_7.py
@@ -167,9 +167,7 @@ def check_actual_range(self, ds):
                     variable[:].min(),
                 ) or not np.isclose(variable.actual_range[1], variable[:].max()):
                     msgs.append(
-                        "actual_range elements of '{}' inconsistent with its min/max values".format(
-                            name,
-                        ),
+                        f"actual_range elements of '{name}' inconsistent with its min/max values",
                     )
                 else:
                     score += 1
@@ -181,9 +179,7 @@ def check_actual_range(self, ds):
                     variable.actual_range[1] > variable.valid_range[1]
                 ):
                     msgs.append(
-                        '"{}"\'s actual_range must be within valid_range'.format(
-                            name,
-                        ),
+                        f'"{name}"\'s actual_range must be within valid_range',
                     )
                 else:
                     score += 1
@@ -194,10 +190,7 @@ def check_actual_range(self, ds):
                 out_of += 1
                 if variable.actual_range[0] < variable.valid_min:
                     msgs.append(
-                        '"{}"\'s actual_range first element must be >= valid_min ({})'.format(
-                            name,
-                            variable.valid_min,
-                        ),
+                        f'"{name}"\'s actual_range first element must be >= valid_min ({variable.valid_min})',
                     )
                 else:
                     score += 1
@@ -205,10 +198,7 @@ def check_actual_range(self, ds):
                 out_of += 1
                 if variable.actual_range[1] > variable.valid_max:
                     msgs.append(
-                        '"{}"\'s actual_range second element must be <= valid_max ({})'.format(
-                            name,
-                            variable.valid_max,
-                        ),
+                        f'"{name}"\'s actual_range second element must be <= valid_max ({variable.valid_max})',
                     )
                 else:
                     score += 1
@@ -254,10 +244,7 @@ def check_cell_boundaries(self, ds):
             if boundary_variable_name not in ds.variables:
                 valid = False
                 reasoning.append(
-                    "Boundary variable {} referenced by {} not ".format(
-                        boundary_variable_name,
-                        variable.name,
-                    )
+                    f"Boundary variable {boundary_variable_name} referenced by {variable.name} not "
                     + "found in dataset variables",
                )
             else:
@@ -269,10 +256,7 @@ def check_cell_boundaries(self, ds):
                 if boundary_variable.ndim < 2:
                     valid = False
                     reasoning.append(
-                        "Boundary variable {} specified by {}".format(
-                            boundary_variable.name,
-                            variable.name,
-                        )
+                        f"Boundary variable {boundary_variable.name} specified by {variable.name}"
                         + " should have at least two dimensions to enclose the base "
                         + "case of a one dimensionsal variable",
                     )
@@ -318,10 +302,7 @@ def check_cell_boundaries(self, ds):
                 if boundary_variable.dtype.kind not in "biufc":
                     valid = False
                     reasoning.append(
-                        "Boundary variable {} specified by {}".format(
-                            boundary_variable.name,
-                            variable.name,
-                        )
+                        f"Boundary variable {boundary_variable.name} specified by {variable.name}"
                         + "must be a numeric data type ",
                     )
@@ -352,10 +333,7 @@ def check_cell_boundaries(self, ds):
                 if not hasattr(boundary_variable, "formula_terms"):
                     valid = False
                     reasoning.append(
-                        "'{}' has 'formula_terms' attr, bounds variable '{}' must also have 'formula_terms'".format(
-                            variable_name,
-                            boundary_variable_name,
-                        ),
+                        f"'{variable_name}' has 'formula_terms' attr, bounds variable '{boundary_variable_name}' must also have 'formula_terms'",
                     )
 
         # 7.1 Recommendations 2/2
@@ -380,10 +358,7 @@ def check_cell_boundaries(self, ds):
                 if unwanted_attributes:
                     valid = False
                     reasoning.append(
-                        "The Boundary variables '{}' should not have the attributes: '{}'".format(
-                            boundary_variable_name,
-                            unwanted_attributes,
-                        ),
+                        f"The Boundary variables '{boundary_variable_name}' should not have the attributes: '{unwanted_attributes}'",
                    )
 
             result = Result(
@@ -818,9 +793,7 @@ def check_grid_mapping(self, ds):
                     pyproj.CRS.from_wkt(crs_wkt)
                 except pyproj.exceptions.CRSError as crs_error:
                     test_ctx.messages.append(
-                        "Cannot parse crs_wkt attribute to CRS using Proj4. Proj4 error: {}".format(
-                            str(crs_error),
-                        ),
+                        f"Cannot parse crs_wkt attribute to CRS using Proj4. Proj4 error: {str(crs_error)}",
                    )
                 else:
                     test_ctx.score += 1
@@ -943,10 +916,7 @@ def _check_dimensionless_vertical_coordinate_1_7(
         _comp_std_name = dim_vert_coords_dict[standard_name][1]
         correct_computed_std_name_ctx.assert_true(
             getattr(variable, "computed_standard_name", None) in _comp_std_name,
-            "§4.3.3 The standard_name of `{}` must map to the correct computed_standard_name, `{}`".format(
-                vname,
-                sorted(_comp_std_name),
-            ),
+            f"§4.3.3 The standard_name of `{vname}` must map to the correct computed_standard_name, `{sorted(_comp_std_name)}`",
        )
         ret_val.append(correct_computed_std_name_ctx.to_result())
diff --git a/compliance_checker/cf/cf_base.py b/compliance_checker/cf/cf_base.py
index 2e420c81..422815cf 100644
--- a/compliance_checker/cf/cf_base.py
+++ b/compliance_checker/cf/cf_base.py
@@ -206,26 +206,19 @@ def check_grid_mapping(self, ds):
             for grid_var_name, coord_var_str in re_all:
                 defines_grid_mapping.assert_true(
                     grid_var_name in ds.variables,
-                    "grid mapping variable {} must exist in this dataset".format(
-                        grid_var_name,
-                    ),
+                    f"grid mapping variable {grid_var_name} must exist in this dataset",
                 )
                 for ref_var in coord_var_str.split():
                     defines_grid_mapping.assert_true(
                         ref_var in ds.variables,
-                        "Coordinate-related variable {} referenced by grid_mapping variable {} must exist in this dataset".format(
-                            ref_var,
-                            grid_var_name,
-                        ),
+                        f"Coordinate-related variable {ref_var} referenced by grid_mapping variable {grid_var_name} must exist in this dataset",
                     )
 
         else:
             for grid_var_name in grid_mapping.split():
                 defines_grid_mapping.assert_true(
                     grid_var_name in ds.variables,
-                    "grid mapping variable {} must exist in this dataset".format(
-                        grid_var_name,
-                    ),
+                    f"grid mapping variable {grid_var_name} must exist in this dataset",
                 )
 
         ret_val[variable.name] = defines_grid_mapping.to_result()
@@ -264,10 +257,7 @@ def check_grid_mapping(self, ds):
             for req in required_attrs:
                 valid_grid_mapping.assert_true(
                     hasattr(grid_var, req),
-                    "{} is a required attribute for grid mapping {}".format(
-                        req,
-                        grid_mapping_name,
-                    ),
+                    f"{req} is a required attribute for grid mapping {grid_mapping_name}",
                )
 
             # Make sure that exactly one of the exclusive attributes exist
@@ -313,10 +303,7 @@ def check_conventions_version(self, ds):
         valid = False
         reasoning = []
 
-        correct_version_string = "{}-{}".format(
-            self._cc_spec,
-            self._cc_spec_version,
-        ).upper()
+        correct_version_string = f"{self._cc_spec}-{self._cc_spec_version}".upper()
         if hasattr(ds, "Conventions"):
             conventions = regex.split(r",|\s+", getattr(ds, "Conventions", ""))
             for convention in conventions:
@@ -682,10 +669,7 @@ def _find_cf_standard_name_table(self, ds):
             )
         else:
             print(
-                "Using cached standard name table v{} from {}".format(
-                    version,
-                    location,
-                ),
+                f"Using cached standard name table v{version} from {location}",
                 file=sys.stderr,
             )
 
@@ -1056,16 +1040,14 @@ def _att_loc_msg(att_loc):
         if att_loc_len == 1:
             valid_loc = att_loc_print_helper(loc_sort[0])
         elif att_loc_len == 2:
-            valid_loc = "{} and {}".format(
-                att_loc_print_helper(loc_sort[0]),
-                att_loc_print_helper(loc_sort[1]),
-            )
+            valid_loc = f"{att_loc_print_helper(loc_sort[0])} and {att_loc_print_helper(loc_sort[1])}"
         # shouldn't be reached under normal circumstances, as any attribute
         # should be either G, C, or D but if another
         # category is added, this will be useful.
         else:
-            valid_loc = ", ".join(loc_sort[:-1]) + ", and {}".format(
-                att_loc_print_helper(loc_sort[-1]),
+            valid_loc = (
+                ", ".join(loc_sort[:-1])
+                + f", and {att_loc_print_helper(loc_sort[-1])}"
             )
         return f"This attribute may only appear in {valid_loc}."
@@ -1188,10 +1170,7 @@ def _check_attr_type(self, attr_name, attr_type, attribute, variable=None):
             if temp_ctx.messages:
                 return (
                     False,
-                    "{} must be numeric and must be equivalent to {} dtype".format(
-                        attr_name,
-                        var_dtype,
-                    ),
+                    f"{attr_name} must be numeric and must be equivalent to {var_dtype} dtype",
                 )
         else:
             # If we reached here, we fell off with an unrecognized type
diff --git a/compliance_checker/cf/util.py b/compliance_checker/cf/util.py
index d57f7d86..fa87357e 100644
--- a/compliance_checker/cf/util.py
+++ b/compliance_checker/cf/util.py
@@ -292,18 +292,13 @@ def download_cf_standard_name_table(version, location=None):
     if version == "latest":
         url = "http://cfconventions.org/Data/cf-standard-names/current/src/cf-standard-name-table.xml"
     else:
-        url = "http://cfconventions.org/Data/cf-standard-names/{}/src/cf-standard-name-table.xml".format(
-            version,
-        )
+        url = f"http://cfconventions.org/Data/cf-standard-names/{version}/src/cf-standard-name-table.xml"
 
     r = requests.get(url, allow_redirects=True)
     r.raise_for_status()
 
     print(
-        "Downloading cf-standard-names table version {} from: {}".format(
-            version,
-            url,
-        ),
+        f"Downloading cf-standard-names table version {version} from: {url}",
         file=sys.stderr,
     )
     with open(location, "wb") as f:
diff --git a/compliance_checker/ioos.py b/compliance_checker/ioos.py
index fe7dbbfd..3264843e 100644
--- a/compliance_checker/ioos.py
+++ b/compliance_checker/ioos.py
@@ -46,10 +46,7 @@ def _has_attr(cls, ds, attr, concept_name, priority=BaseCheck.HIGH):
 
     if not val:
         msgs.append(
-            "Attr '{}' (IOOS concept: '{}') not found in dataset".format(
-                attr,
-                concept_name,
-            ),
+            f"Attr '{attr}' (IOOS concept: '{concept_name}') not found in dataset",
        )
 
     return Result(priority, val, concept_name, msgs)
@@ -64,22 +61,14 @@ def _has_var_attr(cls, dataset, vname, attr, concept_name, priority=BaseCheck.HI
     if vname not in dataset.variables:
         val = False
         msgs.append(
-            "Variable '{}' not present while checking for attr '{}' for IOOS concept: '{}'".format(
-                vname,
-                attr,
-                concept_name,
-            ),
+            f"Variable '{vname}' not present while checking for attr '{attr}' for IOOS concept: '{concept_name}'",
        )
     else:
         v = dataset.variables[vname]
         if attr not in v.ncattrs():
             val = False
             msgs.append(
-                "Attr '{}' not present on var '{}' while checking for IOOS concept: '{}'".format(
-                    attr,
-                    vname,
-                    concept_name,
-                ),
+                f"Attr '{attr}' not present on var '{vname}' while checking for IOOS concept: '{concept_name}'",
             )
 
     return Result(priority, val, concept_name, msgs)
@@ -796,9 +785,7 @@ def check_contributor_role_and_vocabulary(self, ds):
                     False,
                     "contributor_role_vocabulary",
                     [
-                        "contributor_role_vocabulary '{}' must be of type 'string'".format(
-                            vocb,
-                        ),
+                        f"contributor_role_vocabulary '{vocb}' must be of type 'string'",
                     ],
                 ),
             )
@@ -1302,9 +1289,7 @@ def check_single_platform(self, ds):
         num_platforms = len(platform_set)
         if num_platforms > 1 and glb_platform:
-            msg = "A dataset may only have one platform; {} found".format(
-                len(platform_set),
-            )
+            msg = f"A dataset may only have one platform; {len(platform_set)} found"
             val = False
 
         elif (not glb_platform) and num_platforms > 0:
@@ -1586,10 +1571,7 @@ def check_instrument_variables(self, ds):
             if instr in ds.variables:
                 compnt = getattr(ds.variables[instr], "component", None)
                 m = [
-                    "component attribute of {} ({}) must be a string".format(
-                        instr,
-                        compnt,
-                    ),
+                    f"component attribute of {instr} ({compnt}) must be a string",
                 ]
                 if compnt:
                     results.append(
@@ -1607,10 +1589,7 @@ def check_instrument_variables(self, ds):
                 disct = getattr(ds.variables[instr], "discriminant", None)
                 m = [
-                    "discriminant attribute of {} ({}) must be a string".format(
-                        instr,
-                        disct,
-                    ),
+                    f"discriminant attribute of {instr} ({disct}) must be a string",
                 ]
                 if disct:
                     results.append(
@@ -1711,9 +1690,7 @@ def check_qartod_variables_references(self, ds):
                 ).format(v.name)
                 val = False
             else:
-                msg = '"references" attribute for variable "{}" must be a valid URL'.format(
-                    v.name,
-                )
+                msg = f'"references" attribute for variable "{v.name}" must be a valid URL'
                 val = bool(validators.url(attval))
 
             results.append(
diff --git a/compliance_checker/runner.py b/compliance_checker/runner.py
index 114bcac1..0a8c6d2f 100644
--- a/compliance_checker/runner.py
+++ b/compliance_checker/runner.py
@@ -107,9 +107,7 @@ def run_checker(
             else:
                 if len(output_format) > 1:
                     # Update file name if needed
-                    output_filename = "{}.txt".format(
-                        os.path.splitext(output_filename)[0],
-                    )
+                    output_filename = f"{os.path.splitext(output_filename)[0]}.txt"
                 with open(output_filename, "w", encoding="utf-8") as f:
                     with stdout_redirector(f):
                         cls.stdout_output(cs, score_dict, verbose, limit)
@@ -117,17 +115,13 @@ def run_checker(
         elif out_fmt == "html":
             # Update file name if needed
             if len(output_format) > 1 and output_filename != "-":
-                output_filename = "{}.html".format(
-                    os.path.splitext(output_filename)[0],
-                )
+                output_filename = f"{os.path.splitext(output_filename)[0]}.html"
             cls.html_output(cs, score_dict, output_filename, ds_loc, limit)
 
         elif out_fmt in {"json", "json_new"}:
             # Update file name if needed
             if len(output_format) > 1 and output_filename != "-":
-                output_filename = "{}.json".format(
-                    os.path.splitext(output_filename)[0],
-                )
+                output_filename = f"{os.path.splitext(output_filename)[0]}.json"
             cls.json_output(cs, score_dict, output_filename, ds_loc, limit, out_fmt)
 
         else:
diff --git a/compliance_checker/suite.py b/compliance_checker/suite.py
index 7f435208..5db72b67 100644
--- a/compliance_checker/suite.py
+++ b/compliance_checker/suite.py
@@ -644,11 +644,7 @@ def standard_output(self, ds, limit, check_name, groups):
         print("Corrective Actions".center(width))
         plural = "" if issue_count == 1 else "s"
         print(
-            "{} has {} potential issue{}".format(
-                os.path.basename(ds),
-                issue_count,
-                plural,
-            ),
+            f"{os.path.basename(ds)} has {issue_count} potential issue{plural}",
        )
 
     return [groups, points, out_of]
diff --git a/compliance_checker/tests/test_cf.py b/compliance_checker/tests/test_cf.py
index 70c8677c..367ffc6d 100644
--- a/compliance_checker/tests/test_cf.py
+++ b/compliance_checker/tests/test_cf.py
@@ -110,7 +110,7 @@ def test_coord_data_vars(self):
             "temp",
             np.float64,
             dimensions=("time",),
-            fill_value=float(99999999999999999999.0),
+            fill_value=99999999999999999999.0,
         )
         temp.coordinates = "sigma noexist"
         ds.createVariable("sigma", np.float64, dimensions=("siglev",))
@@ -193,7 +193,7 @@ def test_check_child_attr_data_types(self):
             "temp",
             np.float64,
             dimensions=("time",),
-            fill_value=float(99999999999999999999.0),
+            fill_value=99999999999999999999.0,
         )
 
         # give temp _FillValue as a float, expect good result
@@ -2816,7 +2816,7 @@ def test_check_add_offset_scale_factor_type(self):
         # set same dtype
         dataset = MockTimeSeries()  # time lat lon depth
         temp = dataset.createVariable("temp", int, dimensions=("time",))
temp.setncattr("scale_factor", int(5)) + temp.setncattr("scale_factor", 5) r = self.cf.check_add_offset_scale_factor_type(dataset) self.assertTrue(r[1].value) self.assertFalse(r[1].msgs) diff --git a/compliance_checker/tests/test_cf_integration.py b/compliance_checker/tests/test_cf_integration.py index bb344f5e..5d162672 100644 --- a/compliance_checker/tests/test_cf_integration.py +++ b/compliance_checker/tests/test_cf_integration.py @@ -21,9 +21,7 @@ "attribute lat:_CoordianteAxisType should begin with a letter and be composed of letters, digits, and underscores", "attribute lon:_CoordianteAxisType should begin with a letter and be composed of letters, digits, and underscores", "§2.6.2 global attribute history should exist and be a non-empty string", - "standard_name temperature is not defined in Standard Name Table v{}. Possible close match(es): ['air_temperature', 'soil_temperature', 'snow_temperature']".format( - std_names._version, - ), + f"standard_name temperature is not defined in Standard Name Table v{std_names._version}. Possible close match(es): ['air_temperature', 'soil_temperature', 'snow_temperature']", "temperature's auxiliary coordinate specified by the coordinates attribute, precise_lat, is not a variable in this dataset", "temperature's auxiliary coordinate specified by the coordinates attribute, precise_lon, is not a variable in this dataset", ], @@ -45,30 +43,20 @@ "Attribute 'valid_range' (type: ) and parent variable 'wind_direction_qc' (type: ) must have equivalent datatypes", "Attribute 'valid_range' (type: ) and parent variable 'visibility_qc' (type: ) must have equivalent datatypes", '§2.6.1 Conventions global attribute does not contain "CF-1.8"', - "standard_name visibility is not defined in Standard Name Table v{}. Possible close match(es): ['visibility_in_air']".format( - std_names._version, - ), + f"standard_name visibility is not defined in Standard Name Table v{std_names._version}. Possible close match(es): ['visibility_in_air']", 'Standard name modifier "data_quality" for variable visibility_qc is not a valid modifier according to CF Appendix C', "standard_name wind_direction is not defined in Standard Name Table v{}. Possible close match(es): ['wind_to_direction', 'wind_from_direction', 'wind_gust_from_direction']".format( std_names._version, ), 'Standard name modifier "data_quality" for variable wind_direction_qc is not a valid modifier according to CF Appendix C', - "standard_name wind_gust is not defined in Standard Name Table v{}. Possible close match(es): ['y_wind_gust', 'x_wind_gust', 'wind_speed_of_gust']".format( - std_names._version, - ), + f"standard_name wind_gust is not defined in Standard Name Table v{std_names._version}. Possible close match(es): ['y_wind_gust', 'x_wind_gust', 'wind_speed_of_gust']", 'Standard name modifier "data_quality" for variable wind_gust_qc is not a valid modifier according to CF Appendix C', 'Standard name modifier "data_quality" for variable air_temperature_qc is not a valid modifier according to CF Appendix C', - "standard_name use_wind is not defined in Standard Name Table v{}. Possible close match(es): ['y_wind', 'x_wind']".format( - std_names._version, - ), - "standard_name barometric_pressure is not defined in Standard Name Table v{}. Possible close match(es): ['air_pressure', 'reference_pressure', 'barometric_altitude']".format( - std_names._version, - ), + f"standard_name use_wind is not defined in Standard Name Table v{std_names._version}. 
Possible close match(es): ['y_wind', 'x_wind']", + f"standard_name barometric_pressure is not defined in Standard Name Table v{std_names._version}. Possible close match(es): ['air_pressure', 'reference_pressure', 'barometric_altitude']", 'Standard name modifier "data_quality" for variable barometric_pressure_qc is not a valid modifier according to CF Appendix C', 'Standard name modifier "data_quality" for variable wind_speed_qc is not a valid modifier according to CF Appendix C', - "standard_name barometric_pressure is not defined in Standard Name Table v{}. Possible close match(es): ['air_pressure', 'reference_pressure', 'barometric_altitude']".format( - std_names._version, - ), + f"standard_name barometric_pressure is not defined in Standard Name Table v{std_names._version}. Possible close match(es): ['air_pressure', 'reference_pressure', 'barometric_altitude']", "CF recommends latitude variable 'lat' to use units degrees_north", "CF recommends longitude variable 'lon' to use units degrees_east", ], @@ -151,12 +139,8 @@ [ # TODO: referenced/relative time is treated like time units 'Units "hours since 2016-01-01T12:00:00Z" for variable time_offset must be convertible to canonical units "s"', - "standard_name cloud_cover is not defined in Standard Name Table v{}. Possible close match(es): ['land_cover', 'land_cover_lccs', 'cloud_albedo']".format( - std_names._version, - ), - "standard_name dew_point is not defined in Standard Name Table v{}. Possible close match(es): ['dew_point_depression', 'dew_point_temperature']".format( - std_names._version, - ), + f"standard_name cloud_cover is not defined in Standard Name Table v{std_names._version}. Possible close match(es): ['land_cover', 'land_cover_lccs', 'cloud_albedo']", + f"standard_name dew_point is not defined in Standard Name Table v{std_names._version}. Possible close match(es): ['dew_point_depression', 'dew_point_temperature']", ( "GRID is not a valid CF featureType. 
It must be one of point, timeseries, " "trajectory, profile, timeseriesprofile, trajectoryprofile" diff --git a/compliance_checker/tests/test_feature_detection.py b/compliance_checker/tests/test_feature_detection.py index 53665e94..11f88fdb 100644 --- a/compliance_checker/tests/test_feature_detection.py +++ b/compliance_checker/tests/test_feature_detection.py @@ -31,9 +31,7 @@ def test_timeseries(self): """ with Dataset(resources.STATIC_FILES["timeseries"]) as nc: for variable in util.get_geophysical_variables(nc): - assert util.is_timeseries(nc, variable), "{} is timeseries".format( - variable, - ) + assert util.is_timeseries(nc, variable), f"{variable} is timeseries" def test_multi_timeseries_orthogonal(self): """ @@ -63,9 +61,7 @@ def test_trajectory(self): """ with Dataset(resources.STATIC_FILES["trajectory"]) as nc: for variable in util.get_geophysical_variables(nc): - assert util.is_cf_trajectory(nc, variable), "{} is trajectory".format( - variable, - ) + assert util.is_cf_trajectory(nc, variable), f"{variable} is trajectory" def test_trajectory_single(self): """ From 38e9c180235870f0744e7c7271d985eae63c4627 Mon Sep 17 00:00:00 2001 From: Filipe Fernandes Date: Tue, 1 Aug 2023 10:22:11 -0300 Subject: [PATCH 18/49] use a server that won't kick us out --- .../tests/cassettes/test_erddap.yaml | 258 ++++++++++++++++++ compliance_checker/tests/test_protocols.py | 2 +- 2 files changed, 259 insertions(+), 1 deletion(-) diff --git a/compliance_checker/tests/cassettes/test_erddap.yaml b/compliance_checker/tests/cassettes/test_erddap.yaml index 76bcf677..ac0c6b06 100644 --- a/compliance_checker/tests/cassettes/test_erddap.yaml +++ b/compliance_checker/tests/cassettes/test_erddap.yaml @@ -235,4 +235,262 @@ interactions: status: code: 200 message: '' +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate, br + Connection: + - keep-alive + User-Agent: + - python-requests/2.31.0 + method: HEAD + uri: https://www.neracoos.org/erddap/griddap/WW3_EastCoast_latest + response: + body: + string: '' + headers: + Access-Control-Allow-Origin: + - '*' + CF-Cache-Status: + - DYNAMIC + CF-RAY: + - 7efe624eb969a4c9-GRU + Connection: + - keep-alive + Content-Type: + - text/plain + Date: + - Tue, 01 Aug 2023 13:21:24 GMT + Last-Modified: + - Tue, 01 Aug 2023 13:21:24 GMT + Location: + - http://www.neracoos.org/erddap/griddap/WW3_EastCoast_latest.html + NEL: + - '{"success_fraction":0,"report_to":"cf-nel","max_age":604800}' + Report-To: + - '{"endpoints":[{"url":"https:\/\/a.nel.cloudflare.com\/report\/v3?s=viFC4Bf5eGlUkkeMsFQWDFwYf5ZHPxQk%2BosYpxTON6ysF7qwAvm48YPwpGBfcdwFtB10N5RtEvlB%2FMR3JMGTzwGub%2Fxz1BVqAMrzYtXCIPX7aAgm7NIYWtxS%2FCd4JOf5vEg%3D"}],"group":"cf-nel","max_age":604800}' + Server: + - cloudflare + erddap-server: + - '1.82' + xdods-server: + - dods/3.7 + status: + code: 302 + message: Found +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate, br + Connection: + - keep-alive + User-Agent: + - python-requests/2.31.0 + method: HEAD + uri: http://www.neracoos.org/erddap/griddap/WW3_EastCoast_latest.html + response: + body: + string: '' + headers: + Access-Control-Allow-Origin: + - '*' + CF-Cache-Status: + - DYNAMIC + CF-RAY: + - 7efe6250bfeba6a0-GRU + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Type: + - text/html;charset=UTF-8 + Date: + - Tue, 01 Aug 2023 13:21:24 GMT + Last-Modified: + - Tue, 01 Aug 2023 13:21:24 GMT + NEL: + - '{"success_fraction":0,"report_to":"cf-nel","max_age":604800}' + Report-To: 
+ - '{"endpoints":[{"url":"https:\/\/a.nel.cloudflare.com\/report\/v3?s=Q00158RZNE%2BKAQjfwcwy2tfC2LjllIghoAsK3G4CUs896HjG6euAyLq6IjMCA2hWGT6zvvxcSJkeOyXscutlR99zZ3ocSALXeNYjddkQlW597IEYvCvIdkP8az8o%2F7BdKSU%3D"}],"group":"cf-nel","max_age":604800}' + Server: + - cloudflare + erddap-server: + - '1.82' + xdods-server: + - dods/3.7 + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate, br + Connection: + - keep-alive + User-Agent: + - python-requests/2.31.0 + method: HEAD + uri: https://www.neracoos.org/erddap/griddap/WW3_EastCoast_latest + response: + body: + string: '' + headers: + Access-Control-Allow-Origin: + - '*' + CF-Cache-Status: + - DYNAMIC + CF-RAY: + - 7efe62530ea2a507-GRU + Connection: + - keep-alive + Content-Type: + - text/plain + Date: + - Tue, 01 Aug 2023 13:21:25 GMT + Last-Modified: + - Tue, 01 Aug 2023 13:21:25 GMT + Location: + - http://www.neracoos.org/erddap/griddap/WW3_EastCoast_latest.html + NEL: + - '{"success_fraction":0,"report_to":"cf-nel","max_age":604800}' + Report-To: + - '{"endpoints":[{"url":"https:\/\/a.nel.cloudflare.com\/report\/v3?s=hmDpyHvmx5hrFB01AslIWEzp3Hl%2FubqT8Dr%2FoRriX9%2FkBnjpjEXWFvrlKPRoNNqJtNRMZku7TZrfY6XehkaqgfLfZHXf5exSFwOy7RKPQ%2FCwGMLgC2atVzMLfkdKKUd2A0s%3D"}],"group":"cf-nel","max_age":604800}' + Server: + - cloudflare + erddap-server: + - '1.82' + xdods-server: + - dods/3.7 + status: + code: 302 + message: Found +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate, br + Connection: + - keep-alive + User-Agent: + - python-requests/2.31.0 + method: HEAD + uri: http://www.neracoos.org/erddap/griddap/WW3_EastCoast_latest.html + response: + body: + string: '' + headers: + Access-Control-Allow-Origin: + - '*' + CF-Cache-Status: + - DYNAMIC + CF-RAY: + - 7efe6254ea801ac9-GRU + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Type: + - text/html;charset=UTF-8 + Date: + - Tue, 01 Aug 2023 13:21:25 GMT + Last-Modified: + - Tue, 01 Aug 2023 13:21:25 GMT + NEL: + - '{"success_fraction":0,"report_to":"cf-nel","max_age":604800}' + Report-To: + - '{"endpoints":[{"url":"https:\/\/a.nel.cloudflare.com\/report\/v3?s=DQoHrp0kk2O6M%2BjNZy8EjrWe631YF8jIZJSXighA8gBWEnVLfcBQcZoLKCVDueG%2B08qYO4KL1rsEWsQP5tm04Xa%2Fd%2FvPf4jTjCFFmij2FNzNF6h3gEalkcC8SaAE48jHr5A%3D"}],"group":"cf-nel","max_age":604800}' + Server: + - cloudflare + erddap-server: + - '1.82' + xdods-server: + - dods/3.7 + status: + code: 200 + message: OK +- request: + body: null + headers: + Accept: + - '*/*' + Accept-Encoding: + - gzip, deflate, br + Connection: + - keep-alive + User-Agent: + - python-requests/2.31.0 + method: GET + uri: https://www.neracoos.org/erddap/griddap/WW3_EastCoast_latest.das + response: + body: + string: !!binary | + g5UHAMQvm/Xe53J6xclBxI2s4+b8M9UfNUyXQRpLzQSHo3FUiIztIpgvXmq7O3MgGr07JyBJ0Yhw + yUaon8rbGlrXvjYkiLjozCsm7oc6Vf68yhH/zgCVkV8vBHjWIK5Hc+x9sOJI+XAt8WWzYCQvMnLy + C+lLzgZPursNanWioVk068JltrO9tV1X+/xzbX4zw+7uzi7/XP/Cf0ZaS0TykvwhUryPjYzq5gTR + XAfv+sbRyASQM3HWWyNVRm6ivq2PpCjTq8O7tKz3ChTFwRciSdLJiUYkE6p7REzhv2hZ7xVpUaZF + +VKktPm0pcf8/wwYSEUnq8oW35DeblkVWWmws5OVYnHNGfwjGCyRIVl7djeSEjmmNFj8QCjUgvP8 + u8dmQnpJA23jXX+HMmZ/vl1ab2WlQbqzkxX1p8LbpcRVYpwfQHSCitRJDiiplwimqDfbed0S2fwL + WxWaMxmGNxomRlpnda0EiFXwUY4o3NJaxmlEWWQFlY44vAltU8bw8J6n0FHLeKclRxXM9yy9k05a + cko9Glyw9HNNQxlIF5GpCbEyQrOiJTcjqqpr5jyEqhxGe2l9XC0UlG+5oyWdexf474lduxFQI5rb + SnCva93ThSOKr2VbuwXZf/OCyxbGR4yZQZt7rj56oL6xqVUDD200TIhwN//PgLvj5vzm/ujwxmvM + 
09qxsaTUaE3v4jyILSJ5x0Sv06l4F5Ec3x8+nTwbHJ+lZbZr8OrEkhJOSCmy4kRidI+hNliWGUBD + gAw3H2ro8R6CuFYWNODSLTmq9KQ+fEl+PwkZmhTj8MAhCLM6whQGJHPVxUGer1ar7Lv4rG+zlvI8 + QFlKiyR+mG70UZtTiiquhyowgB/s2ccFqdDQDKTNSGt4zxGhizhPkPOzR+CBf51RZJWpUCxzht45 + EMLFO0nzLncu3gnO8Dvn3PNCcjO9Q4jFqgMjTv/NSKqi2kqL/bQoX8qtg6o8qHY+4YfBtzSgk4Hj + jzMyudz36zhQ633MfOhzDtbSIu+DfO/z/r5V5G917ClqM5By1MwSAA0lrvOv1qRwJlOERhUtwZmT + I7adDxZtykT3LZPzgWko3Nr2L96sfLARiSUlg1rsZ2CBYRqDUwo6x5F5CG0Zv7O0jz9wfGkJfocX + yBseDICGe14w2/xEFARcezPu2MWgRBIa+JbJfRsnGkSmewKvs/Eq2REq8FXRwBEdwiD6KbScGcQF + szVwBvhDXKxoydHc81sJopYLNK3yJEjOj29PlGJ3TUXYAYyrjoPkZc6AtMQjbfCdEf5pBHIWgcPw + XDFbdD6gC8z4PikkwnmFOGVn2c46HzBwTwOmyKYZ6iLKdonWOyVxEBe1+OatcMxwx6JzDnuwm86S + 0uzYu6b+xj4YnD6dGNzdHx4aOE9IhBwb7llJOaK7wh87ktvMDulFAwlY43Qc5ggfRMgaqFUfokHQ + rIHkNljt3WwH3RjELbpL+DCTcTEI2//MELSJhSN8h66bHGXpBuQsOlHHccHY/jRbRDOKnAYKGEg9 + wMAHHDoJsXmuLPdBGNl1uRQq8t+YWXcoxHGMLBJMkbtp+NXwHXQuER2kIrIqHnLLSh4unz0ZoKO6 + YgfwKewXzYjr2/7cohm8qLhGccQx+bpIngTOSqtAtfGhjs/SsmpC+Ezj+Pd5jy7v8f6Oy8tL3GDg + PDjzgVuKGj2jDfCRqBCs18X8+LrUc8POIqmKcj8tKyQuVHWicpIQYmjBgwXLtKrbUgcWFZwUDPEv + PHp28GXB/8/+nxkD + headers: + Access-Control-Allow-Origin: + - '*' + CF-Cache-Status: + - DYNAMIC + CF-RAY: + - 7efe62577bcc011e-GRU + Connection: + - keep-alive + Content-Encoding: + - br + Content-Type: + - text/plain;charset=ISO-8859-1 + Date: + - Tue, 01 Aug 2023 13:21:25 GMT + Last-Modified: + - Tue, 01 Aug 2023 13:21:25 GMT + NEL: + - '{"success_fraction":0,"report_to":"cf-nel","max_age":604800}' + Report-To: + - '{"endpoints":[{"url":"https:\/\/a.nel.cloudflare.com\/report\/v3?s=d1lok8SZWJNI1nACRaycX0%2B1YmNtOjflDeTI2QaKJx2Fj2dgrCIzgLl9tG%2B0hQzTS7p43sjGBPgPtyDVLG4Iaw37wW8GyZU5f70NwBVA0dew%2BEQRdM0nNMNpJYOfXx9BJ6U%3D"}],"group":"cf-nel","max_age":604800}' + Server: + - cloudflare + Transfer-Encoding: + - chunked + content-description: + - dods_das + erddap-server: + - '1.82' + xdods-server: + - dods/3.7 + status: + code: 200 + message: OK version: 1 diff --git a/compliance_checker/tests/test_protocols.py b/compliance_checker/tests/test_protocols.py index a0aed220..f43bce68 100644 --- a/compliance_checker/tests/test_protocols.py +++ b/compliance_checker/tests/test_protocols.py @@ -28,7 +28,7 @@ def test_erddap(): """ Tests that a connection can be made to ERDDAP's GridDAP """ - url = "https://coastwatch.pfeg.noaa.gov/erddap/griddap/osu2ChlaAnom" + url = "https://www.neracoos.org/erddap/griddap/WW3_EastCoast_latest" cs = CheckSuite() ds = cs.load_dataset(url) assert ds is not None From d3c797612a4849e80d9151149766e214b7cc383e Mon Sep 17 00:00:00 2001 From: Filipe Fernandes Date: Tue, 5 Sep 2023 14:50:11 -0300 Subject: [PATCH 19/49] update pre-commits --- .pre-commit-config.yaml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 0004e6ef..e8b99575 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -25,18 +25,18 @@ repos: language_version: python3 - repo: https://github.com/asottile/add-trailing-comma - rev: v3.0.1 + rev: v3.1.0 hooks: - id: add-trailing-comma - repo: https://github.com/charliermarsh/ruff-pre-commit - rev: v0.0.281 + rev: v0.0.287 hooks: - id: ruff - repo: https://github.com/tox-dev/pyproject-fmt - rev: 0.13.0 + rev: 1.1.0 hooks: - id: pyproject-fmt From ac2f83b09a3d8aa29a1109ff58626790b43d1b99 Mon Sep 17 00:00:00 2001 From: Filipe Fernandes Date: Tue, 5 Sep 2023 14:50:34 -0300 Subject: [PATCH 20/49] run ruff fix --- cchecker.py | 4 +- compliance_checker/acdd.py | 20 +-- 
compliance_checker/cf/cf_1_6.py | 188 ++++++++++++-------------- compliance_checker/cf/cf_1_7.py | 51 ++----- compliance_checker/cf/cf_base.py | 40 ++---- compliance_checker/ioos.py | 16 +-- compliance_checker/suite.py | 7 +- compliance_checker/tests/test_acdd.py | 2 +- 8 files changed, 134 insertions(+), 194 deletions(-) diff --git a/cchecker.py b/cchecker.py index 72d022df..fd3ef88b 100755 --- a/cchecker.py +++ b/cchecker.py @@ -253,8 +253,8 @@ def main(): for checker_name in sorted(checker_names): if checker_name not in check_suite.checkers: print( - "Cannot find checker '{}' with which to " - "describe checks".format(checker_name), + f"Cannot find checker '{checker_name}' with which to " + "describe checks", file=sys.stderr, ) error_stat = 1 diff --git a/compliance_checker/acdd.py b/compliance_checker/acdd.py index 6ea610ac..9b0abdc7 100644 --- a/compliance_checker/acdd.py +++ b/compliance_checker/acdd.py @@ -249,8 +249,8 @@ def check_lat_extents(self, ds): False, "geospatial_lat_extents_match", [ - "Could not convert one of geospatial_lat_min ({}) or max ({}) to float see CF-1.6 spec chapter 4.1" - "".format(ds.geospatial_lat_min, ds.geospatial_lat_max), + f"Could not convert one of geospatial_lat_min ({ds.geospatial_lat_min}) or max ({ds.geospatial_lat_max}) to float see CF-1.6 spec chapter 4.1" + "", ], ) @@ -347,8 +347,8 @@ def check_lon_extents(self, ds): False, "geospatial_lon_extents_match", [ - "Could not convert one of geospatial_lon_min ({}) or max ({}) to float see CF-1.6 spec chapter 4.1" - "".format(ds.geospatial_lon_min, ds.geospatial_lon_max), + f"Could not convert one of geospatial_lon_min ({ds.geospatial_lon_min}) or max ({ds.geospatial_lon_max}) to float see CF-1.6 spec chapter 4.1" + "", ], ) @@ -440,7 +440,7 @@ def verify_geospatial_bounds(self, ds): [ ( "Could not parse WKT from geospatial_bounds," - ' possible bad value: "{}"'.format(ds.geospatial_bounds) + f' possible bad value: "{ds.geospatial_bounds}"' ), ], variable_name="geospatial_bounds", @@ -637,19 +637,13 @@ def check_time_extents(self, ds): if start_dt > timedelta(hours=1): msgs.append( "Date time mismatch between time_coverage_start and actual " - "time values {} (time_coverage_start) != {} (time[0])".format( - t_min.isoformat(), - time0.isoformat(), - ), + f"time values {t_min.isoformat()} (time_coverage_start) != {time0.isoformat()} (time[0])", ) score -= 1 if end_dt > timedelta(hours=1): msgs.append( "Date time mismatch between time_coverage_end and actual " - "time values {} (time_coverage_end) != {} (time[N])".format( - t_max.isoformat(), - time1.isoformat(), - ), + f"time values {t_max.isoformat()} (time_coverage_end) != {time1.isoformat()} (time[N])", ) score -= 1 diff --git a/compliance_checker/cf/cf_1_6.py b/compliance_checker/cf/cf_1_6.py index 2cc5a74d..63b00337 100644 --- a/compliance_checker/cf/cf_1_6.py +++ b/compliance_checker/cf/cf_1_6.py @@ -238,8 +238,8 @@ def check_naming_conventions(self, ds): for name, variable in ds.variables.items(): variable_naming.assert_true( rname.match(name) is not None, - "variable {} should begin with a letter and be composed of " - "letters, digits, and underscores".format(name), + f"variable {name} should begin with a letter and be composed of " + "letters, digits, and underscores", ) # Keep track of all the attributes, we'll need to check them @@ -254,8 +254,8 @@ def check_naming_conventions(self, ds): continue attribute_naming.assert_true( rname.match(attr) is not None, - "attribute {}:{} should begin with a letter and be composed of " - "letters, 
digits, and underscores".format(name, attr), + f"attribute {name}:{attr} should begin with a letter and be composed of " + "letters, digits, and underscores", ) ret_val.append(variable_naming.to_result()) @@ -263,8 +263,8 @@ def check_naming_conventions(self, ds): for dimension in ds.dimensions: dimension_naming.assert_true( rname.match(dimension) is not None, - "dimension {} should begin with a latter and be composed of " - "letters, digits, and underscores".format(dimension), + f"dimension {dimension} should begin with a latter and be composed of " + "letters, digits, and underscores", ) ret_val.append(dimension_naming.to_result()) @@ -276,8 +276,8 @@ def check_naming_conventions(self, ds): continue attribute_naming.assert_true( rname.match(global_attr) is not None, - "global attribute {} should begin with a letter and be composed of " - "letters, digits, and underscores".format(global_attr), + f"global attribute {global_attr} should begin with a letter and be composed of " + "letters, digits, and underscores", ) ret_val.append(attribute_naming.to_result()) @@ -449,10 +449,8 @@ def check_valid_range_or_valid_min_max_present(self, ds): total = total + 1 fails.append( - "For the variable {} the valid_range attribute must not be present " - "if the valid_min and/or valid_max attributes are present".format( - variable.name, - ), + f"For the variable {variable.name} the valid_range attribute must not be present " + "if the valid_min and/or valid_max attributes are present", ) return Result( @@ -526,8 +524,8 @@ def check_fill_value_outside_valid_range(self, ds): valid_fill_range.assert_true( valid, - "{}:_FillValue ({}) should be outside the range specified by {} ({}, {})" - "".format(name, fill_value, spec_by, rmin, rmax), + f"{name}:_FillValue ({fill_value}) should be outside the range specified by {spec_by} ({rmin}, {rmax})" + "", ) return valid_fill_range.to_result() @@ -552,8 +550,8 @@ def check_convention_globals(self, ds): is_string = isinstance(dataset_attr, str) valid_globals.assert_true( is_string and len(dataset_attr), - "§2.6.2 global attribute {} should exist and be a non-empty string" # subsection message - "".format(attr), + f"§2.6.2 global attribute {attr} should exist and be a non-empty string" # subsection message + "", ) return valid_globals.to_result() @@ -606,8 +604,7 @@ def check_convention_possibly_var_attrs(self, ds): is_string = isinstance(varattr, str) valid_attributes.assert_true( is_string and len(varattr) > 0, - "§2.6.2 {}:{} should be a non-empty string" - "".format(name, attribute), + f"§2.6.2 {name}:{attribute} should be a non-empty string" "", ) attr_bin.add(attribute) @@ -618,8 +615,8 @@ def check_convention_possibly_var_attrs(self, ds): is_string = isinstance(dsattr, str) valid_attributes.assert_true( is_string and len(dsattr) > 0, - "§2.6.2 {} global attribute should be a non-empty string" - "".format(attribute), + f"§2.6.2 {attribute} global attribute should be a non-empty string" + "", ) attr_bin.add(attribute) return valid_attributes.to_result() @@ -926,7 +923,7 @@ def _check_valid_standard_units(self, ds, variable_name): valid_standard_units.assert_true( util.units_convertible(units, "seconds since 1970-01-01"), "time must be in a valid units format since " - "not {}".format(units), + f"not {units}", ) # UDunits can't tell the difference between east and north facing coordinates @@ -935,9 +932,9 @@ def _check_valid_standard_units(self, ds, variable_name): allowed_units = cfutil.VALID_LAT_UNITS | {"degrees"} valid_standard_units.assert_true( 
(units.lower() if units is not None else None) in allowed_units, - 'variables defining latitude ("{}") must use degrees_north ' + f'variables defining latitude ("{variable_name}") must use degrees_north ' "or degrees if defining a transformed grid. Currently " - "{}".format(variable_name, units), + f"{units}", ) # UDunits can't tell the difference between east and north facing coordinates elif standard_name == "longitude": @@ -945,9 +942,9 @@ def _check_valid_standard_units(self, ds, variable_name): allowed_units = cfutil.VALID_LON_UNITS | {"degrees"} valid_standard_units.assert_true( (units.lower() if units is not None else None) in allowed_units, - 'variables defining longitude ("{}") must use degrees_east ' + f'variables defining longitude ("{variable_name}") must use degrees_east ' "or degrees if defining a transformed grid. Currently " - "{}".format(variable_name, units), + f"{units}", ) return valid_standard_units.to_result() @@ -1048,11 +1045,8 @@ def check_standard_name(self, ds): valid_modifier = TestCtx(BaseCheck.HIGH, self.section_titles["3.3"]) valid_modifier.assert_true( standard_name_modifier in valid_modifiers, - 'Standard name modifier "{}" for variable {} is not a valid modifier ' - "according to CF Appendix C".format( - standard_name_modifier, - name, - ), + f'Standard name modifier "{standard_name_modifier}" for variable {name} is not a valid modifier ' + "according to CF Appendix C", ) ret_val.append(valid_modifier.to_result()) @@ -1095,8 +1089,7 @@ def check_ancillary_variables(self, ds): valid_ancillary.assert_true( isinstance(ancillary_variables, str), - "ancillary_variables attribute defined by {} " - "should be string".format(name), + f"ancillary_variables attribute defined by {name} " "should be string", ) # Can't perform the second check if it's not a string @@ -1233,8 +1226,8 @@ def _check_flag_values(self, ds, name): # the data type for flag_values should be the same as the variable valid_values.assert_true( variable.dtype.type == flag_values.dtype.type, - "flag_values ({}) must be the same data type as {} ({})" - "".format(flag_values.dtype.type, name, variable.dtype.type), + f"flag_values ({flag_values.dtype.type}) must be the same data type as {name} ({variable.dtype.type})" + "", ) # IMPLEMENTATION CONFORMANCE 3.5 REQUIRED 4/8 @@ -1270,8 +1263,8 @@ def _check_flag_masks(self, ds, name): valid_masks.assert_true( variable.dtype.type == flag_masks.dtype.type, - "flag_masks ({}) must be the same data type as {} ({})" - "".format(flag_masks.dtype.type, name, variable.dtype.type), + f"flag_masks ({flag_masks.dtype.type}) must be the same data type as {name} ({variable.dtype.type})" + "", ) type_ok = ( @@ -1497,14 +1490,14 @@ def check_latitude(self, ds): # but are convertible to angular units allowed_units.assert_true( units not in e_n_units and Unit(units) == Unit("degree"), - "Grid latitude variable '{}' should use degree equivalent units without east or north components. " - "Current units are {}".format(latitude, units), + f"Grid latitude variable '{latitude}' should use degree equivalent units without east or north components. 
" + f"Current units are {units}", ) else: allowed_units.assert_true( units_is_string and units.lower() in allowed_lat_units, - "latitude variable '{}' should define valid units for latitude" - "".format(latitude), + f"latitude variable '{latitude}' should define valid units for latitude" + "", ) ret_val.append(allowed_units.to_result()) @@ -1513,8 +1506,8 @@ def check_latitude(self, ds): # This is only a recommendation and we won't penalize but we # will include a recommended action. msg = ( - "CF recommends latitude variable '{}' to use units degrees_north" - "".format(latitude) + f"CF recommends latitude variable '{latitude}' to use units degrees_north" + "" ) recommended_units = Result( BaseCheck.LOW, @@ -1529,8 +1522,8 @@ def check_latitude(self, ds): definition = TestCtx(BaseCheck.MEDIUM, self.section_titles["4.1"]) definition.assert_true( standard_name == "latitude" or axis == "Y" or y_variables != [], - "latitude variable '{}' should define standard_name='latitude' or axis='Y'" - "".format(latitude), + f"latitude variable '{latitude}' should define standard_name='latitude' or axis='Y'" + "", ) ret_val.append(definition.to_result()) @@ -1606,14 +1599,14 @@ def check_longitude(self, ds): # but are convertible to angular units allowed_units.assert_true( units not in e_n_units and Unit(units) == Unit("degree"), - "Grid longitude variable '{}' should use degree equivalent units without east or north components. " - "Current units are {}".format(longitude, units), + f"Grid longitude variable '{longitude}' should use degree equivalent units without east or north components. " + f"Current units are {units}", ) else: allowed_units.assert_true( units_is_string and units.lower() in allowed_lon_units, - "longitude variable '{}' should define valid units for longitude" - "".format(longitude), + f"longitude variable '{longitude}' should define valid units for longitude" + "", ) ret_val.append(allowed_units.to_result()) @@ -1622,8 +1615,8 @@ def check_longitude(self, ds): # This is only a recommendation and we won't penalize but we # will include a recommended action. 
msg = ( - "CF recommends longitude variable '{}' to use units degrees_east" - "".format(longitude) + f"CF recommends longitude variable '{longitude}' to use units degrees_east" + "" ) recommended_units = Result( BaseCheck.LOW, @@ -1638,8 +1631,8 @@ def check_longitude(self, ds): definition = TestCtx(BaseCheck.MEDIUM, self.section_titles["4.2"]) definition.assert_true( standard_name == "longitude" or axis == "X" or x_variables != [], - "longitude variable '{}' should define standard_name='longitude' or axis='X'" - "".format(longitude), + f"longitude variable '{longitude}' should define standard_name='longitude' or axis='X'" + "", ) ret_val.append(definition.to_result()) @@ -1693,15 +1686,15 @@ def check_dimensional_vertical_coordinate( valid_vertical_coord = TestCtx(BaseCheck.HIGH, self.section_titles["4.3"]) valid_vertical_coord.assert_true( isinstance(units, str) and units, - "§4.3.1 {}'s units must be defined for vertical coordinates, " - "there is no default".format(name), + f"§4.3.1 {name}'s units must be defined for vertical coordinates, " + "there is no default", ) if not util.units_convertible("bar", units): valid_vertical_coord.assert_true( positive in ("up", "down"), - "{}: vertical coordinates not defining pressure must include " - "a positive attribute that is either 'up' or 'down'".format(name), + f"{name}: vertical coordinates not defining pressure must include " + "a positive attribute that is either 'up' or 'down'", ) # _check_valid_standard_units, part of the Chapter 3 checks, @@ -1740,8 +1733,7 @@ def _check_dimensionless_vertical_coordinate_1_6( is_not_deprecated.assert_true( units not in deprecated_units, - "§4.3.2: units are deprecated by CF in variable {}: {}" - "".format(vname, units), + f"§4.3.2: units are deprecated by CF in variable {vname}: {units}" "", ) # check the vertical coordinates @@ -2144,9 +2136,9 @@ def check_aux_coordinates(self, ds): for aux_coord in coordinates.split(): valid_aux_coords.assert_true( aux_coord in ds.variables, - "{}'s auxiliary coordinate specified by the coordinates attribute, {}, " + f"{name}'s auxiliary coordinate specified by the coordinates attribute, {aux_coord}, " "is not a variable in this dataset" - "".format(name, aux_coord), + "", ) if aux_coord not in ds.variables: continue @@ -2246,7 +2238,7 @@ def check_multi_dimensional_coords(self, ds): not_matching.assert_true( coord not in variable.dimensions, - "{} shares the same name as one of its dimensions" "".format(coord), + f"{coord} shares the same name as one of its dimensions" "", ) ret_val.append(not_matching.to_result()) @@ -2376,8 +2368,7 @@ def check_reduced_horizontal_grid(self, ds): # Make sure reduced grid features define coordinates valid_rgrid.assert_true( isinstance(coords, str) and coords, - "reduced grid feature {} must define coordinates attribute" - "".format(name), + f"reduced grid feature {name} must define coordinates attribute" "", ) # We can't check anything else if there are no defined coordinates if not isinstance(coords, str) and coords: @@ -2405,16 +2396,16 @@ def check_reduced_horizontal_grid(self, ds): compress = getattr(coord, "compress", None) valid_rgrid.assert_true( isinstance(compress, str) and compress, - "compress attribute for compression coordinate {} must be a non-empty string" - "".format(compressed_coord), + f"compress attribute for compression coordinate {compressed_coord} must be a non-empty string" + "", ) if not isinstance(compress, str): continue for dim in compress.split(): valid_rgrid.assert_true( dim in ds.dimensions, - 
"dimension {} referenced by {}:compress must exist" - "".format(dim, compressed_coord), + f"dimension {dim} referenced by {compressed_coord}:compress must exist" + "", ) ret_val.append(valid_rgrid.to_result()) @@ -2754,21 +2745,15 @@ def check_cell_boundaries(self, ds): if boundary_variable.ndim != variable.ndim + 1: valid = False reasoning.append( - "The number of dimensions of the variable {} is {}, but the " - "number of dimensions of the boundary variable {} is {}. The boundary variable " - "should have {} dimensions".format( - variable.name, - variable.ndim, - boundary_variable.name, - boundary_variable.ndim, - variable.ndim + 1, - ), + f"The number of dimensions of the variable {variable.name} is {variable.ndim}, but the " + f"number of dimensions of the boundary variable {boundary_variable.name} is {boundary_variable.ndim}. The boundary variable " + f"should have {variable.ndim + 1} dimensions", ) if variable.dimensions[:] != boundary_variable.dimensions[: variable.ndim]: valid = False reasoning.append( - "Boundary variable coordinates (for {}) are in improper order: {}. Bounds-specific dimensions should be last" - "".format(variable.name, boundary_variable.dimensions), + f"Boundary variable coordinates (for {variable.name}) are in improper order: {boundary_variable.dimensions}. Bounds-specific dimensions should be last" + "", ) # ensure p vertices form a valid simplex given previous a...n @@ -2805,11 +2790,11 @@ def _cell_measures_core(self, ds, var, external_set, variable_template): if not search_res: valid = False reasoning.append( - "The cell_measures attribute for variable {} " + f"The cell_measures attribute for variable {var.name} " "is formatted incorrectly. It should take the " "form of either 'area: cell_var' or " "'volume: cell_var' where cell_var is an existing name of " - "a variable describing the cell measures.".format(var.name), + "a variable describing the cell measures.", ) else: valid = True @@ -2842,8 +2827,8 @@ def _cell_measures_core(self, ds, var, external_set, variable_template): if not hasattr(cell_measure_var, "units"): valid = False reasoning.append( - "Cell measure variable {} is required " - "to have units attribute defined".format(cell_measure_var_name), + f"Cell measure variable {cell_measure_var_name} is required " + "to have units attribute defined", ) else: # IMPLEMENTATION CONFORMANCE REQUIRED 2/2 @@ -2870,12 +2855,9 @@ def _cell_measures_core(self, ds, var, external_set, variable_template): if not set(cell_measure_var.dimensions).issubset(var.dimensions): valid = False reasoning.append( - "Cell measure variable {} must have " + f"Cell measure variable {cell_measure_var_name} must have " "dimensions which are a subset of " - "those defined in variable {}.".format( - cell_measure_var_name, - var.name, - ), + f"those defined in variable {var.name}.", ) return Result(BaseCheck.MEDIUM, valid, (self.section_titles["7.2"]), reasoning) @@ -2954,8 +2936,8 @@ def check_cell_methods(self, ds): ) # changed from 7.1 to 7.3 valid_attribute.assert_true( regex.match(psep, method) is not None, - '"{}" is not a valid format for cell_methods attribute of "{}"' - "".format(method, var.name), + f'"{method}" is not a valid format for cell_methods attribute of "{var.name}"' + "", ) ret_val.append(valid_attribute.to_result()) @@ -2979,8 +2961,8 @@ def check_cell_methods(self, ds): valid_cell_names.assert_true( valid, - "{}'s cell_methods name component {} does not match a dimension, " - "area or auxiliary coordinate".format(var.name, var_str), + f"{var.name}'s 
cell_methods name component {var_str} does not match a dimension, " + "area or auxiliary coordinate", ) ret_val.append(valid_cell_names.to_result()) @@ -3284,9 +3266,9 @@ def check_climatological_statistics(self, ds): "|".join(methods), ) # "or" comparison for the methods re_string = ( - r"^time: {0} within (years|days)" # regex string to test - r" time: {0} over \1(?<=days)(?: time: {0} over years)?" - r"(?: \([^)]+\))?$".format(meth_regex) + rf"^time: {meth_regex} within (years|days)" # regex string to test + rf" time: {meth_regex} over \1(?<=days)(?: time: {meth_regex} over years)?" + r"(?: \([^)]+\))?$" ) # find any variables with a valid climatological cell_methods @@ -3526,10 +3508,8 @@ def check_compression_gathering(self, ds): if coord_size not in range(0, upper_limit_size): valid = False reasoning.append( - "The dimenssion size {} referenced by the compress attribute is not " - "in the range (0, The product of the compressed dimension sizes minus 1)".format( - coord_size, - ), + f"The dimenssion size {coord_size} referenced by the compress attribute is not " + "in the range (0, The product of the compressed dimension sizes minus 1)", ) result = Result( BaseCheck.MEDIUM, @@ -3645,8 +3625,8 @@ def check_variable_features(self, ds): matching_feature = TestCtx(BaseCheck.MEDIUM, self.section_titles["9.1"]) matching_feature.assert_true( variable_feature.lower() == _feature, - "{} is not a {}, it is detected as a {}" - "".format(name, _feature, variable_feature), + f"{name} is not a {_feature}, it is detected as a {variable_feature}" + "", ) ret_val.append(matching_feature.to_result()) @@ -3661,8 +3641,8 @@ def check_variable_features(self, ds): all_same_features = TestCtx(BaseCheck.HIGH, self.section_titles["9.1"]) all_same_features.assert_true( len(feature_types_found) < 2, - "Different feature types discovered in this dataset: {}" - "".format(feature_description), + f"Different feature types discovered in this dataset: {feature_description}" + "", ) ret_val.append(all_same_features.to_result()) @@ -3696,8 +3676,8 @@ def _check_hint_bounds(self, ds): for name in ds.variables: if name.endswith("_bounds") and name not in boundary_variables: msg = ( - "{} might be a cell boundary variable but there are no variables that define it " - "as a boundary using the `bounds` attribute.".format(name) + f"{name} might be a cell boundary variable but there are no variables that define it " + "as a boundary using the `bounds` attribute." ) result = Result(BaseCheck.LOW, True, self.section_titles["7.1"], [msg]) ret_val.append(result) diff --git a/compliance_checker/cf/cf_1_7.py b/compliance_checker/cf/cf_1_7.py index 14d6430c..e47ca240 100644 --- a/compliance_checker/cf/cf_1_7.py +++ b/compliance_checker/cf/cf_1_7.py @@ -263,21 +263,15 @@ def check_cell_boundaries(self, ds): if boundary_variable.ndim != variable.ndim + 1: valid = False reasoning.append( - "The number of dimensions of the variable {} is {}, but the " - "number of dimensions of the boundary variable {} is {}. The boundary variable " - "should have {} dimensions".format( - variable.name, - variable.ndim, - boundary_variable.name, - boundary_variable.ndim, - variable.ndim + 1, - ), + f"The number of dimensions of the variable {variable.name} is {variable.ndim}, but the " + f"number of dimensions of the boundary variable {boundary_variable.name} is {boundary_variable.ndim}. 
The boundary variable " + f"should have {variable.ndim + 1} dimensions", ) if variable.dimensions[:] != boundary_variable.dimensions[: variable.ndim]: valid = False reasoning.append( - "Boundary variable coordinates (for {}) are in improper order: {}. Bounds-specific dimensions should be last" - "".format(variable.name, boundary_variable.dimensions), + f"Boundary variable coordinates (for {variable.name}) are in improper order: {boundary_variable.dimensions}. Bounds-specific dimensions should be last" + "", ) # 7.1 Required 2/5: continue @@ -315,15 +309,9 @@ def check_cell_boundaries(self, ds): if getattr(variable, item) != getattr(boundary_variable, item): valid = False reasoning.append( - "'{}' has attr '{}' with value '{}' that does not agree " - "with its associated variable ('{}')'s attr value '{}'" - "".format( - boundary_variable_name, - item, - getattr(boundary_variable, item), - variable.name, - getattr(variable, item), - ), + f"'{boundary_variable_name}' has attr '{item}' with value '{getattr(boundary_variable, item)}' that does not agree " + f"with its associated variable ('{variable.name}')'s attr value '{getattr(variable, item)}'" + "", ) # 7.1 Required 5/5: @@ -396,14 +384,9 @@ def check_cell_boundaries_interval(self, ds): ): valid = False reasoning.append( - "The points specified by the coordinate variable {} ({})" + f"The points specified by the coordinate variable {variable_name} ({variable[ii]})" " lie outside the boundary of the cell specified by the " - "associated boundary variable {} ({})".format( - variable_name, - variable[ii], - boundary_variable_name, - boundary_variable[ii], - ), + f"associated boundary variable {boundary_variable_name} ({boundary_variable[ii]})", ) result = Result( @@ -819,7 +802,7 @@ def check_grid_mapping(self, ds): test_ctx.messages.append( "Cannot have both 'geoid_name' and " "'geopotential_datum_name' attributes in " - "grid mapping variable '{}'".format(var.name), + f"grid mapping variable '{var.name}'", ) elif len_vdatum_name_attrs == 1: # should be one or zero attrs @@ -834,20 +817,16 @@ def check_grid_mapping(self, ds): ) invalid_msg = ( - "Vertical datum value '{}' for " - "attribute '{}' in grid mapping " - "variable '{}' is not valid".format( - v_datum_value, - v_datum_attr, - var.name, - ) + f"Vertical datum value '{v_datum_value}' for " + f"attribute '{v_datum_attr}' in grid mapping " + f"variable '{var.name}' is not valid" ) test_ctx.assert_true(v_datum_str_valid, invalid_msg) except sqlite3.Error as e: # if we hit an error, skip the check warn( "Error occurred while trying to query " - "Proj4 SQLite database at {}: {}".format(proj_db_path, str(e)), + f"Proj4 SQLite database at {proj_db_path}: {str(e)}", stacklevel=2, ) prev_return[var.name] = test_ctx.to_result() diff --git a/compliance_checker/cf/cf_base.py b/compliance_checker/cf/cf_base.py index 422815cf..7fd1d978 100644 --- a/compliance_checker/cf/cf_base.py +++ b/compliance_checker/cf/cf_base.py @@ -185,8 +185,8 @@ def check_grid_mapping(self, ds): ) defines_grid_mapping.assert_true( (isinstance(grid_mapping, str) and grid_mapping), - "{}'s grid_mapping attribute must be a " - "space-separated non-empty string".format(variable.name), + f"{variable.name}'s grid_mapping attribute must be a " + "space-separated non-empty string", ) if isinstance(grid_mapping, str): # TODO (badams): refactor functionality to split functionality @@ -313,7 +313,7 @@ def check_conventions_version(self, ds): else: reasoning = [ "§2.6.1 Conventions global attribute does not contain " - 
'"{}"'.format(correct_version_string), + f'"{correct_version_string}"', ] else: valid = False @@ -375,8 +375,8 @@ def _check_formula_terms(self, ds, coord, dimless_coords_dict): valid_formula_terms.assert_true( isinstance(formula_terms, str) and formula_terms, - "§4.3.2: {}'s formula_terms is a required attribute and must be a non-empty string" - "".format(coord), + f"§4.3.2: {coord}'s formula_terms is a required attribute and must be a non-empty string" + "", ) # We can't check any more if not formula_terms: @@ -417,16 +417,16 @@ def _check_formula_terms(self, ds, coord, dimless_coords_dict): valid_formula_terms.assert_true( standard_name in dimless_coords_dict, - "unknown standard_name '{}' for dimensionless vertical coordinate {}" - "".format(standard_name, coord), + f"unknown standard_name '{standard_name}' for dimensionless vertical coordinate {coord}" + "", ) if standard_name not in dimless_coords_dict: return valid_formula_terms.to_result() valid_formula_terms.assert_true( no_missing_terms(standard_name, terms, dimless_coords_dict), - "{}'s formula_terms are invalid for {}, please see appendix D of CF 1.6" - "".format(coord, standard_name), + f"{coord}'s formula_terms are invalid for {standard_name}, please see appendix D of CF 1.6" + "", ) return valid_formula_terms.to_result() @@ -476,13 +476,8 @@ def _parent_var_attr_type_check(self, attr_name, var, ctx): ctx.assert_true( type_match, - "Attribute '{}' (type: {}) and parent variable '{}' (type: {}) " - "must have equivalent datatypes".format( - attr_name, - val_type, - var.name, - var.dtype.type, - ), + f"Attribute '{attr_name}' (type: {val_type}) and parent variable '{var.name}' (type: {var.dtype.type}) " + "must have equivalent datatypes", ) def _find_aux_coord_vars(self, ds, refresh=False): @@ -1070,8 +1065,8 @@ def _att_loc_msg(att_loc): test_ctx.out_of += 1 if "G" not in att_loc: test_ctx.messages.append( - '[Appendix A] Attribute "{}" should not be present in global (G) ' - "attributes. {}".format(global_att_name, valid_loc_warn), + f'[Appendix A] Attribute "{global_att_name}" should not be present in global (G) ' + f"attributes. {valid_loc_warn}", ) else: result = self._handle_dtype_check(global_att, global_att_name, att_dict) @@ -1109,13 +1104,8 @@ def _att_loc_msg(att_loc): test_ctx.out_of += 1 if coord_letter not in att_loc: test_ctx.messages.append( - '[Appendix A] Attribute "{}" should not be present in {} ' - 'variable "{}". {}'.format( - att_name, - att_loc_print_helper(coord_letter), - var_name, - valid_loc_warn, - ), + f'[Appendix A] Attribute "{att_name}" should not be present in {att_loc_print_helper(coord_letter)} ' + f'variable "{var_name}". 
{valid_loc_warn}', ) else: result = self._handle_dtype_check(att, att_name, att_dict, var) diff --git a/compliance_checker/ioos.py b/compliance_checker/ioos.py index 3264843e..733b9e91 100644 --- a/compliance_checker/ioos.py +++ b/compliance_checker/ioos.py @@ -1232,8 +1232,8 @@ def check_creator_and_publisher_type(self, ds): else: pass_stat = False messages.append( - "If specified, {} must be in value list " - "({})".format(global_att_name, sorted(expected_types)), + f"If specified, {global_att_name} must be in value list " + f"({sorted(expected_types)})", ) result_list.append( @@ -1390,14 +1390,14 @@ def check_vertical_coordinates(self, ds): valid_vertical_coord = TestCtx(BaseCheck.HIGH, "Vertical coordinates") units_set_msg = ( - "{}'s units attribute {} is not equivalent to one " - "of {}".format(name, units_str, expected_unit_strs) + f"{name}'s units attribute {units_str} is not equivalent to one " + f"of {expected_unit_strs}" ) valid_vertical_coord.assert_true(pass_stat, units_set_msg) pos_msg = ( - "{}: vertical coordinates must include a positive " - "attribute that is either 'up' or 'down'".format(name) + f"{name}: vertical coordinates must include a positive " + "attribute that is either 'up' or 'down'" ) valid_vertical_coord.assert_true(positive in ("up", "down"), pos_msg) @@ -1685,9 +1685,9 @@ def check_qartod_variables_references(self, ds): attval = getattr(v, "references", None) if attval is None: msg = ( - '"references" attribute not present for variable {}.' + f'"references" attribute not present for variable {v.name}.' "If present, it should be a valid URL." - ).format(v.name) + ) val = False else: msg = f'"references" attribute for variable "{v.name}" must be a valid URL' diff --git a/compliance_checker/suite.py b/compliance_checker/suite.py index 5db72b67..d0e84769 100644 --- a/compliance_checker/suite.py +++ b/compliance_checker/suite.py @@ -346,11 +346,8 @@ def _process_skip_checks(cls, skip_checks): check_max_level = check_lookup[split_check_spec[1]] except KeyError: warnings.warn( - "Skip specifier '{}' on check '{}' not found," - " defaulting to skip entire check".format( - split_check_spec[1], - check_name, - ), + f"Skip specifier '{split_check_spec[1]}' on check '{check_name}' not found," + " defaulting to skip entire check", stacklevel=2, ) check_max_level = BaseCheck.HIGH diff --git a/compliance_checker/tests/test_acdd.py b/compliance_checker/tests/test_acdd.py index 13b84bb0..9661be73 100644 --- a/compliance_checker/tests/test_acdd.py +++ b/compliance_checker/tests/test_acdd.py @@ -439,7 +439,7 @@ def test_geospatial_bounds(self): if result.variable_name == "geospatial_bounds": assert ( "Could not parse WKT from geospatial_bounds," - ' possible bad value: "{}"'.format(empty_ds.geospatial_bounds) + f' possible bad value: "{empty_ds.geospatial_bounds}"' in result.msgs ) From e7f8647c31e5647b31db47a98600dd8e10e0ab4a Mon Sep 17 00:00:00 2001 From: Filipe Fernandes Date: Tue, 5 Sep 2023 14:54:19 -0300 Subject: [PATCH 21/49] fix E721 --- compliance_checker/cf/util.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/compliance_checker/cf/util.py b/compliance_checker/cf/util.py index fa87357e..35734acb 100644 --- a/compliance_checker/cf/util.py +++ b/compliance_checker/cf/util.py @@ -167,7 +167,7 @@ def get_safe(dict_instance, keypath, default=None): """ try: obj = dict_instance - keylist = keypath if type(keypath) is list else keypath.split(".") + keylist = keypath if isinstance(keypath, list) else keypath.split(".") for key in keylist: obj = obj[key] 
return obj From ae75c2bab94c1caadd8477d4f4905b7bb8ee3522 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 5 Sep 2023 17:57:26 +0000 Subject: [PATCH 22/49] Bump actions/checkout from 3 to 4 Bumps [actions/checkout](https://github.com/actions/checkout) from 3 to 4. - [Release notes](https://github.com/actions/checkout/releases) - [Changelog](https://github.com/actions/checkout/blob/main/CHANGELOG.md) - [Commits](https://github.com/actions/checkout/compare/v3...v4) --- updated-dependencies: - dependency-name: actions/checkout dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] --- .github/workflows/cc-plugin-glider-test.yml | 2 +- .github/workflows/cc-plugin-sgrid-test.yml | 2 +- .github/workflows/cc-plugin-ugrid-test.yml | 2 +- .github/workflows/codecov.yml | 2 +- .github/workflows/default-tests.yml | 2 +- .github/workflows/deploy-docs.yml | 2 +- .github/workflows/integration-tests.yml | 2 +- .github/workflows/pypi.yml | 2 +- 8 files changed, 8 insertions(+), 8 deletions(-) diff --git a/.github/workflows/cc-plugin-glider-test.yml b/.github/workflows/cc-plugin-glider-test.yml index 037ff32b..82cb9030 100644 --- a/.github/workflows/cc-plugin-glider-test.yml +++ b/.github/workflows/cc-plugin-glider-test.yml @@ -9,7 +9,7 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Setup Micromamba uses: mamba-org/setup-micromamba@v1 diff --git a/.github/workflows/cc-plugin-sgrid-test.yml b/.github/workflows/cc-plugin-sgrid-test.yml index fec9a182..e8872401 100644 --- a/.github/workflows/cc-plugin-sgrid-test.yml +++ b/.github/workflows/cc-plugin-sgrid-test.yml @@ -9,7 +9,7 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Setup Micromamba uses: mamba-org/setup-micromamba@v1 diff --git a/.github/workflows/cc-plugin-ugrid-test.yml b/.github/workflows/cc-plugin-ugrid-test.yml index 92d346df..b3b74d36 100644 --- a/.github/workflows/cc-plugin-ugrid-test.yml +++ b/.github/workflows/cc-plugin-ugrid-test.yml @@ -9,7 +9,7 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Setup Micromamba uses: mamba-org/setup-micromamba@v1 diff --git a/.github/workflows/codecov.yml b/.github/workflows/codecov.yml index 55db7a0a..ee836239 100644 --- a/.github/workflows/codecov.yml +++ b/.github/workflows/codecov.yml @@ -9,7 +9,7 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Setup Micromamba uses: mamba-org/setup-micromamba@v1 diff --git a/.github/workflows/default-tests.yml b/.github/workflows/default-tests.yml index 55e0e823..a2c143ee 100644 --- a/.github/workflows/default-tests.yml +++ b/.github/workflows/default-tests.yml @@ -14,7 +14,7 @@ jobs: fail-fast: false steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Setup Micromamba ${{ matrix.python-version }} uses: mamba-org/setup-micromamba@v1 diff --git a/.github/workflows/deploy-docs.yml b/.github/workflows/deploy-docs.yml index 3b22fe7c..6789d321 100644 --- a/.github/workflows/deploy-docs.yml +++ b/.github/workflows/deploy-docs.yml @@ -14,7 +14,7 @@ jobs: steps: - name: checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: fetch-depth: 0 diff --git a/.github/workflows/integration-tests.yml b/.github/workflows/integration-tests.yml index ecf49146..87c46cfe 100644 --- 
a/.github/workflows/integration-tests.yml +++ b/.github/workflows/integration-tests.yml @@ -9,7 +9,7 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Setup Micromamba uses: mamba-org/setup-micromamba@v1 diff --git a/.github/workflows/pypi.yml b/.github/workflows/pypi.yml index 0a2bb686..d8904e80 100644 --- a/.github/workflows/pypi.yml +++ b/.github/workflows/pypi.yml @@ -15,7 +15,7 @@ jobs: packages: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: # Should be enough for setuptools-scm fetch-depth: 100 From 2b68fb6c1bd0835fcf17d647a0a5669dad939739 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 15 Sep 2023 02:54:44 +0000 Subject: [PATCH 23/49] Bump codecov/codecov-action from 3 to 4 Bumps [codecov/codecov-action](https://github.com/codecov/codecov-action) from 3 to 4. - [Release notes](https://github.com/codecov/codecov-action/releases) - [Changelog](https://github.com/codecov/codecov-action/blob/main/CHANGELOG.md) - [Commits](https://github.com/codecov/codecov-action/compare/v3...v4) --- updated-dependencies: - dependency-name: codecov/codecov-action dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] --- .github/workflows/codecov.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/codecov.yml b/.github/workflows/codecov.yml index ee836239..c32a9fb4 100644 --- a/.github/workflows/codecov.yml +++ b/.github/workflows/codecov.yml @@ -37,6 +37,6 @@ jobs: continue-on-error: true - name: Upload to codecov - uses: codecov/codecov-action@v3 + uses: codecov/codecov-action@v4 with: files: coverage.xml From a71829e8b7701037685ce8917a40ec03a7366d54 Mon Sep 17 00:00:00 2001 From: Filipe Fernandes Date: Wed, 4 Oct 2023 12:07:44 -0300 Subject: [PATCH 24/49] fix typos --- compliance_checker/cf/cf_1_6.py | 4 ++-- compliance_checker/cfutil.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/compliance_checker/cf/cf_1_6.py b/compliance_checker/cf/cf_1_6.py index 63b00337..dd571c07 100644 --- a/compliance_checker/cf/cf_1_6.py +++ b/compliance_checker/cf/cf_1_6.py @@ -2026,7 +2026,7 @@ def check_standard_calendar_no_cross(time_var): def _check_leap_time(self, time_variable): """ - Helper method to handle checking custom calendar leap time specifiations + Helper method to handle checking custom calendar leap time specifications """ leap_time = TestCtx(BaseCheck.HIGH, self.section_titles["4.4"]) leap_time.out_of = 1 @@ -3501,7 +3501,7 @@ def check_compression_gathering(self, ds): for item in ds.dimensions.values() if item.name in compress_set ] - # get the upper limt of the dimenssion size + # get the upper limit of the dimenssion size upper_limit_size = np.prod(coord_list_size) - 1 for coord_size in coord_list_size: diff --git a/compliance_checker/cfutil.py b/compliance_checker/cfutil.py index e583570f..b351a94b 100644 --- a/compliance_checker/cfutil.py +++ b/compliance_checker/cfutil.py @@ -277,7 +277,7 @@ def get_auxiliary_coordinate_variables(ds): :param netCDf4.Dataset ds: An open netCDF dataset """ aux_vars = [] - # get any variables referecned by the coordinates attribute + # get any variables referenced by the coordinates attribute for ncvar in ds.get_variables_by_attributes( coordinates=lambda x: isinstance(x, str), ): From 7ad29a7b1e6b2efc1a40822611721dad32eb1e73 Mon Sep 17 00:00:00 2001 From: Filipe Fernandes Date: Wed, 4 
Oct 2023 12:13:03 -0300
Subject: [PATCH 25/49] v4 is beta and was probably mistakenly released as stable

---
 .github/workflows/codecov.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/codecov.yml b/.github/workflows/codecov.yml
index c32a9fb4..ee836239 100644
--- a/.github/workflows/codecov.yml
+++ b/.github/workflows/codecov.yml
@@ -37,6 +37,6 @@ jobs:
       continue-on-error: true

     - name: Upload to codecov
-      uses: codecov/codecov-action@v4
+      uses: codecov/codecov-action@v3
       with:
         files: coverage.xml

From 95ef64dbd952cf3d71bd845bd46fefd33c197d4a Mon Sep 17 00:00:00 2001
From: "pre-commit-ci[bot]"
 <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Date: Tue, 3 Oct 2023 04:57:19 +0000
Subject: [PATCH 26/49] [pre-commit.ci] pre-commit autoupdate
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

updates:
- [github.com/psf/black: 23.7.0 → 23.9.1](https://github.com/psf/black/compare/23.7.0...23.9.1)
- https://github.com/charliermarsh/ruff-pre-commit → https://github.com/astral-sh/ruff-pre-commit
- [github.com/astral-sh/ruff-pre-commit: v0.0.287 → v0.0.292](https://github.com/astral-sh/ruff-pre-commit/compare/v0.0.287...v0.0.292)
- [github.com/tox-dev/pyproject-fmt: 1.1.0 → 1.2.0](https://github.com/tox-dev/pyproject-fmt/compare/1.1.0...1.2.0)
- [github.com/codespell-project/codespell: v2.2.5 → v2.2.6](https://github.com/codespell-project/codespell/compare/v2.2.5...v2.2.6)
---
 .pre-commit-config.yaml | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index e8b99575..975d0530 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -19,7 +19,7 @@ repos:
     - test_requirements.txt

 - repo: https://github.com/psf/black
-  rev: 23.7.0
+  rev: 23.9.1
   hooks:
   - id: black
     language_version: python3

@@ -30,18 +30,18 @@ repos:
   - id: add-trailing-comma


-- repo: https://github.com/charliermarsh/ruff-pre-commit
-  rev: v0.0.287
+- repo: https://github.com/astral-sh/ruff-pre-commit
+  rev: v0.0.292
   hooks:
   - id: ruff

 - repo: https://github.com/tox-dev/pyproject-fmt
-  rev: 1.1.0
+  rev: 1.2.0
   hooks:
   - id: pyproject-fmt

 - repo: https://github.com/codespell-project/codespell
-  rev: v2.2.5
+  rev: v2.2.6
   hooks:
   - id: codespell
     args:

From b95631b856d8ffa55af958fef77f7067bced75b7 Mon Sep 17 00:00:00 2001
From: "pre-commit-ci[bot]"
 <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Date: Mon, 6 Nov 2023 19:19:09 +0000
Subject: [PATCH 27/49] [pre-commit.ci] pre-commit autoupdate
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

updates:
- [github.com/pre-commit/pre-commit-hooks: v4.4.0 → v4.5.0](https://github.com/pre-commit/pre-commit-hooks/compare/v4.4.0...v4.5.0)
- [github.com/psf/black: 23.9.1 → 23.10.1](https://github.com/psf/black/compare/23.9.1...23.10.1)
- [github.com/astral-sh/ruff-pre-commit: v0.0.292 → v0.1.4](https://github.com/astral-sh/ruff-pre-commit/compare/v0.0.292...v0.1.4)
- [github.com/tox-dev/pyproject-fmt: 1.2.0 → 1.4.1](https://github.com/tox-dev/pyproject-fmt/compare/1.2.0...1.4.1)
---
 .pre-commit-config.yaml | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 975d0530..503365f0 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,6 +1,6 @@
 repos:
 - repo: https://github.com/pre-commit/pre-commit-hooks
-  rev: v4.4.0
+  rev: v4.5.0
   hooks:
   - id: trailing-whitespace
     exclude: compliance_checker/tests/data
@@
-19,7 +19,7 @@ repos: - test_requirements.txt - repo: https://github.com/psf/black - rev: 23.9.1 + rev: 23.10.1 hooks: - id: black language_version: python3 @@ -31,12 +31,12 @@ repos: - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.0.292 + rev: v0.1.4 hooks: - id: ruff - repo: https://github.com/tox-dev/pyproject-fmt - rev: 1.2.0 + rev: 1.4.1 hooks: - id: pyproject-fmt From dc1d502c4ed5f1c1be544d56d1eb6eb4376de6a5 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 4 Dec 2023 19:19:00 +0000 Subject: [PATCH 28/49] [pre-commit.ci] pre-commit autoupdate MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/psf/black: 23.10.1 → 23.11.0](https://github.com/psf/black/compare/23.10.1...23.11.0) - [github.com/astral-sh/ruff-pre-commit: v0.1.4 → v0.1.6](https://github.com/astral-sh/ruff-pre-commit/compare/v0.1.4...v0.1.6) - [github.com/tox-dev/pyproject-fmt: 1.4.1 → 1.5.3](https://github.com/tox-dev/pyproject-fmt/compare/1.4.1...1.5.3) --- .pre-commit-config.yaml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 503365f0..fb4b8f8a 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -19,7 +19,7 @@ repos: - test_requirements.txt - repo: https://github.com/psf/black - rev: 23.10.1 + rev: 23.11.0 hooks: - id: black language_version: python3 @@ -31,12 +31,12 @@ repos: - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.1.4 + rev: v0.1.6 hooks: - id: ruff - repo: https://github.com/tox-dev/pyproject-fmt - rev: 1.4.1 + rev: 1.5.3 hooks: - id: pyproject-fmt From be23f224d85a32874479224c5d87ec46e59ef7ca Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 7 Dec 2023 02:48:28 +0000 Subject: [PATCH 29/49] Bump actions/setup-python from 4 to 5 Bumps [actions/setup-python](https://github.com/actions/setup-python) from 4 to 5. - [Release notes](https://github.com/actions/setup-python/releases) - [Commits](https://github.com/actions/setup-python/compare/v4...v5) --- updated-dependencies: - dependency-name: actions/setup-python dependency-type: direct:production update-type: version-update:semver-major ... 
Signed-off-by: dependabot[bot] --- .github/workflows/pypi.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/pypi.yml b/.github/workflows/pypi.yml index d8904e80..e134f7c0 100644 --- a/.github/workflows/pypi.yml +++ b/.github/workflows/pypi.yml @@ -25,7 +25,7 @@ jobs: run: git fetch origin 'refs/tags/*:refs/tags/*' - name: Set up Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: "3.11" From 0d2981f2ffae1afc839616ae3ca17d18ffb14b4c Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 1 Jan 2024 19:13:21 +0000 Subject: [PATCH 30/49] [pre-commit.ci] pre-commit autoupdate MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/psf/black: 23.11.0 → 23.12.1](https://github.com/psf/black/compare/23.11.0...23.12.1) - [github.com/astral-sh/ruff-pre-commit: v0.1.6 → v0.1.9](https://github.com/astral-sh/ruff-pre-commit/compare/v0.1.6...v0.1.9) --- .pre-commit-config.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index fb4b8f8a..e65dfe3a 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -19,7 +19,7 @@ repos: - test_requirements.txt - repo: https://github.com/psf/black - rev: 23.11.0 + rev: 23.12.1 hooks: - id: black language_version: python3 @@ -31,7 +31,7 @@ repos: - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.1.6 + rev: v0.1.9 hooks: - id: ruff From 4e6e3d1997c599398da095562bba5dd7aaaeda7d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 1 Feb 2024 02:53:23 +0000 Subject: [PATCH 31/49] Bump codecov/codecov-action from 3 to 4 Bumps [codecov/codecov-action](https://github.com/codecov/codecov-action) from 3 to 4. - [Release notes](https://github.com/codecov/codecov-action/releases) - [Changelog](https://github.com/codecov/codecov-action/blob/main/CHANGELOG.md) - [Commits](https://github.com/codecov/codecov-action/compare/v3...v4) --- updated-dependencies: - dependency-name: codecov/codecov-action dependency-type: direct:production update-type: version-update:semver-major ... 
Signed-off-by: dependabot[bot] --- .github/workflows/codecov.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/codecov.yml b/.github/workflows/codecov.yml index ee836239..c32a9fb4 100644 --- a/.github/workflows/codecov.yml +++ b/.github/workflows/codecov.yml @@ -37,6 +37,6 @@ jobs: continue-on-error: true - name: Upload to codecov - uses: codecov/codecov-action@v3 + uses: codecov/codecov-action@v4 with: files: coverage.xml From ead0a4a242d76db25a02a4366c574bf754ade4da Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 5 Feb 2024 19:17:04 +0000 Subject: [PATCH 32/49] [pre-commit.ci] pre-commit autoupdate MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/psf/black: 23.12.1 → 24.1.1](https://github.com/psf/black/compare/23.12.1...24.1.1) - [github.com/astral-sh/ruff-pre-commit: v0.1.9 → v0.2.0](https://github.com/astral-sh/ruff-pre-commit/compare/v0.1.9...v0.2.0) - [github.com/tox-dev/pyproject-fmt: 1.5.3 → 1.7.0](https://github.com/tox-dev/pyproject-fmt/compare/1.5.3...1.7.0) --- .pre-commit-config.yaml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index e65dfe3a..9e13b767 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -19,7 +19,7 @@ repos: - test_requirements.txt - repo: https://github.com/psf/black - rev: 23.12.1 + rev: 24.1.1 hooks: - id: black language_version: python3 @@ -31,12 +31,12 @@ repos: - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.1.9 + rev: v0.2.0 hooks: - id: ruff - repo: https://github.com/tox-dev/pyproject-fmt - rev: 1.5.3 + rev: 1.7.0 hooks: - id: pyproject-fmt From 0b026903e794c05ab4474f1835a31f6adae7d973 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 5 Feb 2024 19:17:18 +0000 Subject: [PATCH 33/49] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- compliance_checker/ioos.py | 35 +++++++++++++++++------------ compliance_checker/tests/test_cf.py | 10 ++++----- compliance_checker/util.py | 1 + 3 files changed, 26 insertions(+), 20 deletions(-) diff --git a/compliance_checker/ioos.py b/compliance_checker/ioos.py index 733b9e91..2deed991 100644 --- a/compliance_checker/ioos.py +++ b/compliance_checker/ioos.py @@ -1,6 +1,7 @@ """ Check for IOOS-approved attributes """ + import re from numbers import Number @@ -1543,12 +1544,14 @@ def check_gts_ingest_requirements(self, ds): BaseCheck.HIGH, False, # always fail "NDBC/GTS Ingest Requirements", - [var_passed_ingest_msg.format(", ".join(_var_passed))] - if all_passed_ingest_reqs - else [ - var_passed_ingest_msg.format(", ".join(_var_passed)), - var_failed_ingest_msg.format(", ".join(_var_failed)), - ], + ( + [var_passed_ingest_msg.format(", ".join(_var_passed))] + if all_passed_ingest_reqs + else [ + var_passed_ingest_msg.format(", ".join(_var_passed)), + var_failed_ingest_msg.format(", ".join(_var_failed)), + ] + ), ) def check_instrument_variables(self, ds): @@ -1764,9 +1767,11 @@ def check_instrument_make_model_calib_date(self, ds): BaseCheck.MEDIUM, valid, "instrument_variable:make_model", - None - if valid - else [f"Attribute {v}:make_model ({mm}) should be a string"], + ( + None + if valid + else [f"Attribute {v}:make_model ({mm}) should be a string"] + ), ), ) @@ -1784,11 +1789,13 @@ def 
check_instrument_make_model_calib_date(self, ds): BaseCheck.MEDIUM, valid, "instrument_variable:calibration_date", - None - if valid - else [ - f"Attribute {v}:calibration_date ({cd}) should be an ISO-8601 string", - ], + ( + None + if valid + else [ + f"Attribute {v}:calibration_date ({cd}) should be an ISO-8601 string", + ] + ), ), ) diff --git a/compliance_checker/tests/test_cf.py b/compliance_checker/tests/test_cf.py index 367ffc6d..360683be 100644 --- a/compliance_checker/tests/test_cf.py +++ b/compliance_checker/tests/test_cf.py @@ -1300,9 +1300,7 @@ def test_check_time_coordinate(self): # NB: >= 60 seconds is nonstandard, but isn't actually a CF requirement # until CF 1.9 onwards dataset.variables["time"].units = "months since 0-1-1 23:00:60" - dataset.variables[ - "time" - ].climatology = ( + dataset.variables["time"].climatology = ( "nonexistent_variable_reference_only_used_to_test_year_zero_failure" ) results = self.cf.check_time_coordinate(dataset) @@ -2952,9 +2950,9 @@ def test_bad_lsid(self): messages = results[0].msgs assert results[0].value[0] < results[0].value[1] assert len(messages) == 1 - taxon_lsid[ - 0 - ] = "http://www.lsid.info/urn:lsid:marinespecies.org:taxname:99999999999" + taxon_lsid[0] = ( + "http://www.lsid.info/urn:lsid:marinespecies.org:taxname:99999999999" + ) results = self.cf.check_taxa(dataset) assert messages[0].startswith( "Taxon id must match one of the following forms:", diff --git a/compliance_checker/util.py b/compliance_checker/util.py index 8117ad3e..6b03e98b 100644 --- a/compliance_checker/util.py +++ b/compliance_checker/util.py @@ -1,6 +1,7 @@ """ General purpose utility functions to aid in compliance checking tasks """ + from collections import OrderedDict import isodate From eeeb23d6003dfe4ed54ac6facf2882b91caef452 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 9 Apr 2024 02:18:02 +0000 Subject: [PATCH 34/49] Bump peaceiris/actions-gh-pages from 3 to 4 Bumps [peaceiris/actions-gh-pages](https://github.com/peaceiris/actions-gh-pages) from 3 to 4. - [Release notes](https://github.com/peaceiris/actions-gh-pages/releases) - [Changelog](https://github.com/peaceiris/actions-gh-pages/blob/main/CHANGELOG.md) - [Commits](https://github.com/peaceiris/actions-gh-pages/compare/v3...v4) --- updated-dependencies: - dependency-name: peaceiris/actions-gh-pages dependency-type: direct:production update-type: version-update:semver-major ... 
Signed-off-by: dependabot[bot] --- .github/workflows/deploy-docs.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/deploy-docs.yml b/.github/workflows/deploy-docs.yml index 6789d321..6b0fe0da 100644 --- a/.github/workflows/deploy-docs.yml +++ b/.github/workflows/deploy-docs.yml @@ -45,7 +45,7 @@ jobs: - name: Deploy if: success() && github.event_name == 'release' - uses: peaceiris/actions-gh-pages@v3 + uses: peaceiris/actions-gh-pages@v4 with: github_token: ${{ secrets.GITHUB_TOKEN }} publish_dir: docs/build/html From 74b13c0ae7a2d72d4be67420fc75e445219ce428 Mon Sep 17 00:00:00 2001 From: Filipe Fernandes Date: Tue, 1 Aug 2023 14:37:28 -0300 Subject: [PATCH 35/49] remove all but 1 pkg_resources imports --- compliance_checker/cf/util.py | 8 ++--- compliance_checker/cfutil.py | 4 +-- compliance_checker/protocols/netcdf.py | 1 + compliance_checker/suite.py | 9 ++---- compliance_checker/tests/__init__.py | 5 +-- compliance_checker/tests/conftest.py | 7 +++-- compliance_checker/tests/resources.py | 10 +++--- compliance_checker/tests/test_suite.py | 43 ++++++++++++-------------- 8 files changed, 40 insertions(+), 47 deletions(-) diff --git a/compliance_checker/cf/util.py b/compliance_checker/cf/util.py index 35734acb..64a21c1a 100644 --- a/compliance_checker/cf/util.py +++ b/compliance_checker/cf/util.py @@ -1,3 +1,4 @@ +import importlib.resources import itertools import os import sys @@ -7,7 +8,6 @@ from cf_units import Unit from lxml import etree from netCDF4 import Dataset -from pkg_resources import resource_filename # copied from paegan # paegan may depend on these later @@ -284,9 +284,9 @@ def download_cf_standard_name_table(version, location=None): if ( location is None ): # This case occurs when updating the packaged version from command line - location = resource_filename( - "compliance_checker", - "data/cf-standard-name-table.xml", + location = ( + importlib.resources.files("compliance_checker") + / "data/cf-standard-name-table.xml" ) if version == "latest": diff --git a/compliance_checker/cfutil.py b/compliance_checker/cfutil.py index b351a94b..22195a22 100644 --- a/compliance_checker/cfutil.py +++ b/compliance_checker/cfutil.py @@ -3,13 +3,13 @@ compliance_checker/cfutil.py """ import csv +import importlib.resources import re import warnings from collections import defaultdict from functools import lru_cache, partial from cf_units import Unit -from pkg_resources import resource_filename _UNITLESS_DB = None _SEA_NAMES = None @@ -128,7 +128,7 @@ def get_sea_names(): if _SEA_NAMES is None: buf = {} with open( - resource_filename("compliance_checker", "data/seanames.csv"), + importlib.resources.files("compliance_checker") / "data/seanames.csv", ) as f: reader = csv.reader(f) for code, sea_name in reader: diff --git a/compliance_checker/protocols/netcdf.py b/compliance_checker/protocols/netcdf.py index 415a94ec..4943ac8a 100644 --- a/compliance_checker/protocols/netcdf.py +++ b/compliance_checker/protocols/netcdf.py @@ -17,6 +17,7 @@ def is_netcdf(url): :param str url: Location of file on the file system """ # Try an obvious exclusion of remote resources + url = str(url) if url.startswith("http"): return False diff --git a/compliance_checker/suite.py b/compliance_checker/suite.py index d0e84769..c512f91b 100644 --- a/compliance_checker/suite.py +++ b/compliance_checker/suite.py @@ -765,12 +765,9 @@ def generate_dataset(self, cdl_path): :param str cdl_path: Absolute path to cdl file that is used to generate netCDF file """ - if ( - ".cdl" in cdl_path - 
): # it's possible the filename doesn't have the .cdl extension - ds_str = cdl_path.replace(".cdl", ".nc") - else: - ds_str = cdl_path + ".nc" + if isinstance(cdl_path, str): + cdl_path = Path(cdl_path) + ds_str = cdl_path.with_suffix(".nc") # generate netCDF-4 file iostat = subprocess.run( diff --git a/compliance_checker/tests/__init__.py b/compliance_checker/tests/__init__.py index d8060c2a..1d907517 100644 --- a/compliance_checker/tests/__init__.py +++ b/compliance_checker/tests/__init__.py @@ -1,4 +1,5 @@ import unittest +from pathlib import Path from netCDF4 import Dataset @@ -25,8 +26,8 @@ def load_dataset(self, nc_dataset): """ Return a loaded NC Dataset for the given path """ - if not isinstance(nc_dataset, str): - raise ValueError("nc_dataset should be a string") + if not isinstance(nc_dataset, (str, Path)): + raise ValueError("nc_dataset should be a valid path") nc_dataset = Dataset(nc_dataset, "r") self.addCleanup(nc_dataset.close) diff --git a/compliance_checker/tests/conftest.py b/compliance_checker/tests/conftest.py index 482bd814..f4002bc5 100644 --- a/compliance_checker/tests/conftest.py +++ b/compliance_checker/tests/conftest.py @@ -1,11 +1,10 @@ +import importlib.resources import os import subprocess from itertools import chain -from pathlib import Path import pytest from netCDF4 import Dataset -from pkg_resources import resource_filename from compliance_checker.cf import util from compliance_checker.suite import CheckSuite @@ -27,7 +26,9 @@ def static_files(cdl_stem): Returns the Path to a valid nc dataset\n replaces the old STATIC_FILES dict """ - datadir = Path(resource_filename("compliance_checker", "tests/data")).resolve() + datadir = ( + importlib.resources.files("compliance_checker").joinpath("tests/data").resolve() + ) assert datadir.exists(), f"{datadir} not found" cdl_paths = glob_down(datadir, f"{cdl_stem}.cdl", 3) diff --git a/compliance_checker/tests/resources.py b/compliance_checker/tests/resources.py index 4bcf314c..67786179 100644 --- a/compliance_checker/tests/resources.py +++ b/compliance_checker/tests/resources.py @@ -1,16 +1,14 @@ -import os +import importlib.resources import subprocess -from pkg_resources import resource_filename - def get_filename(path): """ Returns the path to a valid dataset """ - filename = resource_filename("compliance_checker", path) - nc_path = filename.replace(".cdl", ".nc") - if not os.path.exists(nc_path): + filename = importlib.resources.files("compliance_checker") / path + nc_path = filename.with_suffix(".nc") + if not nc_path.exists(): generate_dataset(filename, nc_path) return nc_path diff --git a/compliance_checker/tests/test_suite.py b/compliance_checker/tests/test_suite.py index 54f49a95..0bbbc44d 100644 --- a/compliance_checker/tests/test_suite.py +++ b/compliance_checker/tests/test_suite.py @@ -1,35 +1,30 @@ +import importlib.resources import os import unittest from pathlib import Path import numpy as np -from pkg_resources import resource_filename from compliance_checker.acdd import ACDDBaseCheck from compliance_checker.base import BaseCheck, GenericFile, Result from compliance_checker.suite import CheckSuite static_files = { - "2dim": resource_filename("compliance_checker", "tests/data/2dim-grid.nc"), - "bad_region": resource_filename("compliance_checker", "tests/data/bad_region.nc"), - "bad_data_type": resource_filename( - "compliance_checker", - "tests/data/bad_data_type.nc", - ), - "test_cdl": resource_filename("compliance_checker", "tests/data/test_cdl.cdl"), - "test_cdl_nc": resource_filename( - 
"compliance_checker", - "tests/data/test_cdl_nc_file.nc", - ), - "empty": resource_filename("compliance_checker", "tests/data/non-comp/empty.file"), - "ru07": resource_filename( - "compliance_checker", - "tests/data/ru07-20130824T170228_rt0.nc", - ), - "netCDF4": resource_filename( - "compliance_checker", - "tests/data/test_cdl_nc4_file.cdl", - ), + "2dim": importlib.resources.files("compliance_checker") / "tests/data/2dim-grid.nc", + "bad_region": importlib.resources.files("compliance_checker") + / "tests/data/bad_region.nc", + "bad_data_type": importlib.resources.files("compliance_checker") + / "tests/data/bad_data_type.nc", + "test_cdl": importlib.resources.files("compliance_checker") + / "tests/data/test_cdl.cdl", + "test_cdl_nc": importlib.resources.files("compliance_checker") + / "tests/data/test_cdl_nc_file.nc", + "empty": importlib.resources.files("compliance_checker") + / "tests/data/non-comp/empty.file", + "ru07": importlib.resources.files("compliance_checker") + / "tests/data/ru07-20130824T170228_rt0.nc", + "netCDF4": importlib.resources.files("compliance_checker") + / "tests/data/test_cdl_nc4_file.cdl", } @@ -95,9 +90,9 @@ def test_generate_dataset_netCDF4(self): # create netCDF4 file ds_name = self.cs.generate_dataset(static_files["netCDF4"]) # check if correct name is return - assert ds_name == static_files["netCDF4"].replace(".cdl", ".nc") + assert ds_name == static_files["netCDF4"].with_suffix(".nc") # check if netCDF4 file was created - assert os.path.isfile(static_files["netCDF4"].replace(".cdl", ".nc")) + assert os.path.isfile(static_files["netCDF4"].with_suffix(".nc")) def test_include_checks(self): ds = self.cs.load_dataset(static_files["bad_data_type"]) @@ -242,7 +237,7 @@ def test_cdl_file(self): ) ds.close() - nc_file_path = static_files["test_cdl"].replace(".cdl", ".nc") + nc_file_path = static_files["test_cdl"].with_suffix(".nc") self.addCleanup(os.remove, nc_file_path) # Ok the scores should be equal! 
From 29175200597f3b82dc74e6f26cf1c6bdc0a309f0 Mon Sep 17 00:00:00 2001 From: Filipe Fernandes Date: Wed, 9 Aug 2023 11:13:50 -0300 Subject: [PATCH 36/49] remove distutils --- compliance_checker/suite.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/compliance_checker/suite.py b/compliance_checker/suite.py index c512f91b..7d922110 100644 --- a/compliance_checker/suite.py +++ b/compliance_checker/suite.py @@ -13,7 +13,6 @@ import warnings from collections import defaultdict from datetime import datetime, timezone -from distutils.version import StrictVersion from operator import itemgetter from pathlib import Path from urllib.parse import urlparse @@ -23,6 +22,7 @@ from netCDF4 import Dataset from owslib.sos import SensorObservationService from owslib.swe.sensor.sml import SensorML +from packaging.version import parse from pkg_resources import working_set from compliance_checker import __version__, tempnc @@ -186,9 +186,8 @@ def _load_checkers(cls, checkers): for spec, versions in itertools.groupby(ver_checkers, itemgetter(0)): version_nums = [v[-1] for v in versions] try: - latest_version = str(max(StrictVersion(v) for v in version_nums)) - # if the version can't be parsed as a StrictVersion, parse - # according to character collation + latest_version = str(max(parse(v) for v in version_nums)) + # if the version can't be parsed, do it according to character collation except ValueError: latest_version = max(version_nums) cls.checkers[spec] = cls.checkers[spec + ":latest"] = cls.checkers[ From 02884b44e228f0dfccd2a1c7c71c2f7d9bee4cd9 Mon Sep 17 00:00:00 2001 From: Filipe Fernandes Date: Tue, 17 Oct 2023 18:51:46 -0300 Subject: [PATCH 37/49] add packaging --- requirements.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/requirements.txt b/requirements.txt index 3df7b0d9..5c04ef39 100644 --- a/requirements.txt +++ b/requirements.txt @@ -5,6 +5,7 @@ jinja2>=2.7.3 lxml>=3.2.1 netcdf4>=1.6.4 owsLib>=0.8.3 +packaging pendulum>=1.2.4 pygeoif>=0.6 pyproj>=2.2.1 From 96e004940f66341a832f5e71b17cd07fbe26637a Mon Sep 17 00:00:00 2001 From: Filipe Fernandes Date: Thu, 11 Apr 2024 11:13:06 +0200 Subject: [PATCH 38/49] update pre-commits --- .pre-commit-config.yaml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 9e13b767..f0f1a089 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.5.0 + rev: v4.6.0 hooks: - id: trailing-whitespace exclude: compliance_checker/tests/data @@ -19,7 +19,7 @@ repos: - test_requirements.txt - repo: https://github.com/psf/black - rev: 24.1.1 + rev: 24.3.0 hooks: - id: black language_version: python3 @@ -31,7 +31,7 @@ repos: - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.2.0 + rev: v0.3.5 hooks: - id: ruff From 3ddd88bbb472aa3d10c5b253cad7285685ac04bc Mon Sep 17 00:00:00 2001 From: Filipe Fernandes Date: Thu, 11 Apr 2024 11:13:36 +0200 Subject: [PATCH 39/49] fix deprecation warnings --- pyproject.toml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index af8b5817..be14e45f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -7,7 +7,7 @@ requires = [ ] [tool.ruff] -select = [ +lint.select = [ "A", # flake8-builtins "B", # flake8-bugbear "C4", # flake8-comprehensions @@ -24,11 +24,11 @@ exclude = [ "compliance_checker/cf/cf.py", ] -ignore = [ +lint.ignore = [ "E501", ] -[tool.ruff.per-file-ignores] 
+[tool.ruff.lint.per-file-ignores] "docs/source/conf.py" = [ "E402", "A001", From d3ee89b25f7e68aa5c4f54304b5eb6bff64ebe99 Mon Sep 17 00:00:00 2001 From: Filipe Fernandes Date: Thu, 11 Apr 2024 11:13:51 +0200 Subject: [PATCH 40/49] fix some lints --- compliance_checker/cf/cf_1_6.py | 7 +------ compliance_checker/cf/cf_1_7.py | 7 +------ compliance_checker/tests/test_cf_integration.py | 4 +--- 3 files changed, 3 insertions(+), 15 deletions(-) diff --git a/compliance_checker/cf/cf_1_6.py b/compliance_checker/cf/cf_1_6.py index dd571c07..2e31f029 100644 --- a/compliance_checker/cf/cf_1_6.py +++ b/compliance_checker/cf/cf_1_6.py @@ -2764,12 +2764,7 @@ def check_cell_boundaries(self, ds): ): valid = False reasoning.append( - "Dimension {} of boundary variable (for {}) must have at least {} elements to form a simplex/closed cell with previous dimensions {}.".format( - boundary_variable.name, - variable.name, - len(variable.dimensions) + 1, - boundary_variable.dimensions[:-1], - ), + f"Dimension {boundary_variable.name} of boundary variable (for {variable.name}) must have at least {len(variable.dimensions) + 1} elements to form a simplex/closed cell with previous dimensions {boundary_variable.dimensions[:-1]}.", ) result = Result( BaseCheck.MEDIUM, diff --git a/compliance_checker/cf/cf_1_7.py b/compliance_checker/cf/cf_1_7.py index e47ca240..50eb05ee 100644 --- a/compliance_checker/cf/cf_1_7.py +++ b/compliance_checker/cf/cf_1_7.py @@ -283,12 +283,7 @@ def check_cell_boundaries(self, ds): ): valid = False reasoning.append( - "Dimension {} of boundary variable (for {}) must have at least {} elements to form a simplex/closed cell with previous dimensions {}.".format( - boundary_variable.name, - variable.name, - len(variable.dimensions) + 1, - boundary_variable.dimensions[:-1], - ), + f"Dimension {boundary_variable.name} of boundary variable (for {variable.name}) must have at least {len(variable.dimensions) + 1} elements to form a simplex/closed cell with previous dimensions {boundary_variable.dimensions[:-1]}.", ) # 7.1 Required 3/5: diff --git a/compliance_checker/tests/test_cf_integration.py b/compliance_checker/tests/test_cf_integration.py index 5d162672..33f8fa29 100644 --- a/compliance_checker/tests/test_cf_integration.py +++ b/compliance_checker/tests/test_cf_integration.py @@ -45,9 +45,7 @@ '§2.6.1 Conventions global attribute does not contain "CF-1.8"', f"standard_name visibility is not defined in Standard Name Table v{std_names._version}. Possible close match(es): ['visibility_in_air']", 'Standard name modifier "data_quality" for variable visibility_qc is not a valid modifier according to CF Appendix C', - "standard_name wind_direction is not defined in Standard Name Table v{}. Possible close match(es): ['wind_to_direction', 'wind_from_direction', 'wind_gust_from_direction']".format( - std_names._version, - ), + f"standard_name wind_direction is not defined in Standard Name Table v{std_names._version}. Possible close match(es): ['wind_to_direction', 'wind_from_direction', 'wind_gust_from_direction']", 'Standard name modifier "data_quality" for variable wind_direction_qc is not a valid modifier according to CF Appendix C', f"standard_name wind_gust is not defined in Standard Name Table v{std_names._version}. 
Possible close match(es): ['y_wind_gust', 'x_wind_gust', 'wind_speed_of_gust']", 'Standard name modifier "data_quality" for variable wind_gust_qc is not a valid modifier according to CF Appendix C', From 2a5b851f5cdf56a53ec6e79b93ffb44711e73acd Mon Sep 17 00:00:00 2001 From: Filipe Fernandes Date: Mon, 17 Apr 2023 16:08:46 -0300 Subject: [PATCH 41/49] move to pyproject.toml --- pyproject.toml | 85 ++++++++++++++++++++++++++++++++++++++++++++++++++ setup.py | 81 ----------------------------------------------- 2 files changed, 85 insertions(+), 81 deletions(-) delete mode 100644 setup.py diff --git a/pyproject.toml b/pyproject.toml index be14e45f..19bfa321 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -6,6 +6,91 @@ requires = [ "wheel", ] +<<<<<<< HEAD +======= +[project] +name = "compliance-checker" +description = "Checks Datasets and SOS endpoints for standards compliance" +readme = "README.md" +license = {text = "Apache-2.0"} +authors = [ + {name = "Dave Foster", email = "dave@axiomdatascience.com"}, +] +requires-python = ">=3.8" +classifiers=[ + "Development Status :: 5 - Production/Stable", + "Intended Audience :: Developers", + "Intended Audience :: Science/Research", + "License :: OSI Approved :: Apache Software License", + "Operating System :: MacOS :: MacOS X", + "Operating System :: Microsoft :: Windows", + "Operating System :: POSIX :: Linux", + "Programming Language :: Python", + "Programming Language :: Python :: 3 :: Only", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Topic :: Scientific/Engineering", +] +dynamic = [ + "dependencies", + "version", +] +[project.urls] +documentation = "https://ioos.github.io/compliance-checker" +homepage = "https://compliance.ioos.us/index.html" +repository = "https://github.com/ioos/compliance-checker" +[project.scripts] +compliance-checker = "cchecker:main" +[project.entry-points."compliance_checker.suites"] +"acdd-1.1" = "compliance_checker.acdd:ACDD1_1Check" +"acdd-1.3" = "compliance_checker.acdd:ACDD1_3Check" +"cf-1.6" = "compliance_checker.cf.cf:CF1_6Check" +"cf-1.7" = "compliance_checker.cf.cf:CF1_7Check" +"cf-1.8" = "compliance_checker.cf.cf:CF1_8Check" +"ioos-0.1" = "compliance_checker.ioos:IOOS0_1Check" +"ioos-1.1" = "compliance_checker.ioos:IOOS1_1Check" +"ioos-1.2" = "compliance_checker.ioos:IOOS1_2Check" +"ioos_sos" = "compliance_checker.ioos:IOOSBaseSOSCheck" + +[tool.setuptools] +packages = ["compliance_checker"] +license-files = ["LICENSE"] +zip-safe = false +include-package-data = true +script-files = ["cchecker.py"] + +[tool.setuptools.package-data] +compliance_checker = [ + "data/*.xml", + "tests/data/*.nc", + "tests/data/*.cdl", + "tests/data/non-comp/*.cdl", + "data/templates/*.j2", +] + +[tool.setuptools.dynamic] +dependencies = {file = ["requirements.txt"]} +readme = {file = "README.md", content-type = "text/markdown"} + +[tool.setuptools_scm] +write_to = "compliance_checker/_version.py" +write_to_template = "__version__ = '{version}'" +tag_regex = "^(?P<prefix>v)?(?P<version>[^\\+]+)(?P<suffix>.*)?$" + +[tool.pytest.ini_options] +markers = [ + "integration: marks integration tests (deselect with '-m \"not integration\"')", + "slowtest: marks slow tests (deselect with '-m \"not slowtest\"')" +] +filterwarnings = [ + "error:::compliance-checker.*", + "ignore::UserWarning", + "ignore::RuntimeWarning", +] + +>>>>>>> 2fb66ed (move to pyproject.toml) [tool.ruff] lint.select = [ "A", # flake8-builtins diff --git
a/setup.py b/setup.py deleted file mode 100644 index e26a432b..00000000 --- a/setup.py +++ /dev/null @@ -1,81 +0,0 @@ -from setuptools import find_packages, setup - - -def readme(): - with open("README.md", encoding="utf-8") as f: - return f.read() - - -def pip_requirements(fname="requirements.txt"): - reqs = [] - with open(fname) as f: - for line in f: - line = line.strip() - if not line or line.startswith("#"): - continue - reqs.append(line) - - return reqs - - -setup( - name="compliance-checker", - description="Checks Datasets and SOS endpoints for standards compliance", - long_description=readme(), - long_description_content_type="text/markdown", - license="Apache License 2.0", - author="Dave Foster", - author_email="dave@axiomdatascience.com", - url="https://github.com/ioos/compliance-checker", - packages=find_packages(), - install_requires=pip_requirements(), - python_requires="~=3.7", - tests_require=["pytest"], - classifiers=[ - "Development Status :: 5 - Production/Stable", - "Intended Audience :: Developers", - "Intended Audience :: Science/Research", - "License :: OSI Approved :: Apache Software License", - "Operating System :: POSIX :: Linux", - "Operating System :: MacOS :: MacOS X", - "Operating System :: Microsoft :: Windows", - "Programming Language :: Python", - "Topic :: Scientific/Engineering", - ], - include_package_data=True, - scripts=["cchecker.py"], - # Note: Do not use colons in the entry-point keys. Python 3 reserves - # portions of the key after a colon for special use. - # Note: The entry point names are not used at all. All methods in the - # compliance checker use class attributes to determine the checker's name - # and version. But, an entry point must be defined for each plugin to be - # loaded. - entry_points={ - "console_scripts": ["compliance-checker = cchecker:main"], - "compliance_checker.suites": [ - "cf-1.6 = compliance_checker.cf.cf:CF1_6Check", - "cf-1.7 = compliance_checker.cf.cf:CF1_7Check", - "cf-1.8 = compliance_checker.cf.cf:CF1_8Check", - "acdd-1.1 = compliance_checker.acdd:ACDD1_1Check", - "acdd-1.3 = compliance_checker.acdd:ACDD1_3Check", - "ioos_sos = compliance_checker.ioos:IOOSBaseSOSCheck", - "ioos-0.1 = compliance_checker.ioos:IOOS0_1Check", - "ioos-1.1 = compliance_checker.ioos:IOOS1_1Check", - "ioos-1.2 = compliance_checker.ioos:IOOS1_2Check", - ], - }, - package_data={ - "compliance_checker": [ - "data/*.xml", - "tests/data/*.nc", - "tests/data/*.cdl", - "tests/data/non-comp/*.cdl", - "data/templates/*.j2", - ], - }, - use_scm_version={ - "write_to": "compliance_checker/_version.py", - "write_to_template": '__version__ = "{version}"', - "tag_regex": r"^(?P<prefix>v)?(?P<version>[^\+]+)(?P<suffix>.*)?$", - }, -) From da5c97da916dbd7dc0200a25223053bff42cb920 Mon Sep 17 00:00:00 2001 From: Filipe Fernandes Date: Mon, 29 May 2023 12:20:17 -0300 Subject: [PATCH 42/49] add top 3 committers as maintainers --- pyproject.toml | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 19bfa321..3dd376e0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -13,8 +13,11 @@ name = "compliance-checker" description = "Checks Datasets and SOS endpoints for standards compliance" readme = "README.md" license = {text = "Apache-2.0"} -authors = [ +maintainers = [ {name = "Dave Foster", email = "dave@axiomdatascience.com"}, + {name = "Benjamin Adams"}, + {name = "Luke Campbell"}, + {name = "Filipe Fernandes"}, ] requires-python = ">=3.8" classifiers=[ From 5bc09789b8b7dcbfa92d96f68878a76d94a56be5 Mon Sep 17 00:00:00 2001 From:
Filipe Fernandes Date: Wed, 4 Oct 2023 12:32:38 -0300 Subject: [PATCH 43/49] lint --- pyproject.toml | 19 +++++-------------- 1 file changed, 5 insertions(+), 14 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 3dd376e0..a8d46718 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -6,8 +6,6 @@ requires = [ "wheel", ] -<<<<<<< HEAD -======= [project] name = "compliance-checker" description = "Checks Datasets and SOS endpoints for standards compliance" @@ -82,18 +80,6 @@ write_to = "compliance_checker/_version.py" write_to_template = "__version__ = '{version}'" tag_regex = "^(?P<prefix>v)?(?P<version>[^\\+]+)(?P<suffix>.*)?$" -[tool.pytest.ini_options] -markers = [ - "integration: marks integration tests (deselect with '-m \"not integration\"')", - "slowtest: marks slow tests (deselect with '-m \"not slowtest\"')" -] -filterwarnings = [ - "error:::compliance-checker.*", - "ignore::UserWarning", - "ignore::RuntimeWarning", -] - ->>>>>>> 2fb66ed (move to pyproject.toml) [tool.ruff] lint.select = [ @@ -129,3 +115,8 @@ markers = [ "integration: marks integration tests (deselect with '-m \"not integration\"')", "slowtest: marks slow tests (deselect with '-m \"not slowtest\"')" ] +filterwarnings = [ + "error:::compliance-checker.*", + "ignore::UserWarning", + "ignore::RuntimeWarning", +] From 3f892d55d2fa9eee8b8bc2eac6cc8cf5ebb4f0f4 Mon Sep 17 00:00:00 2001 From: Filipe Fernandes Date: Thu, 11 Apr 2024 14:13:22 +0200 Subject: [PATCH 44/49] add py312 --- pyproject.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/pyproject.toml b/pyproject.toml index a8d46718..a78aad24 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -32,6 +32,7 @@ classifiers=[ "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: Scientific/Engineering", ] dynamic = [ From a7e4cc796afb7f7a054ab92257a4a4a97486249d Mon Sep 17 00:00:00 2001 From: Filipe Fernandes Date: Thu, 11 Apr 2024 14:15:31 +0200 Subject: [PATCH 45/49] drop 3.7 and add 3.12 in the tests --- .github/workflows/default-tests.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/default-tests.yml b/.github/workflows/default-tests.yml index a2c143ee..d1d05a27 100644 --- a/.github/workflows/default-tests.yml +++ b/.github/workflows/default-tests.yml @@ -9,7 +9,7 @@ jobs: runs-on: ${{ matrix.os }} strategy: matrix: - python-version: ["3.7", "3.8", "3.9", "3.10", "3.11"] + python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"] os: [windows-latest, ubuntu-latest, macos-latest] fail-fast: false From 96dd622abd971b5639011dc82f1af073510cc871 Mon Sep 17 00:00:00 2001 From: Filipe Fernandes Date: Thu, 11 Apr 2024 14:15:51 +0200 Subject: [PATCH 46/49] no need to use minor version here --- .github/workflows/pypi.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/pypi.yml b/.github/workflows/pypi.yml index e134f7c0..8bfd0b60 100644 --- a/.github/workflows/pypi.yml +++ b/.github/workflows/pypi.yml @@ -27,7 +27,7 @@ jobs: - name: Set up Python uses: actions/setup-python@v5 with: - python-version: "3.11" + python-version: "3" - name: Install build tools run: | From 77ac3ae11850bb16942f504f26e2ec6657e16ee9 Mon Sep 17 00:00:00 2001 From: Filipe Fernandes Date: Thu, 11 Apr 2024 14:20:59 +0200 Subject: [PATCH 47/49] revert to 3.11 b/c cf_units is broken on 3.12 --- .github/workflows/pypi.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git
a/.github/workflows/pypi.yml b/.github/workflows/pypi.yml index 8bfd0b60..e134f7c0 100644 --- a/.github/workflows/pypi.yml +++ b/.github/workflows/pypi.yml @@ -27,7 +27,7 @@ jobs: - name: Set up Python uses: actions/setup-python@v5 with: - python-version: "3" + python-version: "3.11" - name: Install build tools run: | From d53d22768e50df7ae05d28f7abeeccedef071419 Mon Sep 17 00:00:00 2001 From: Filipe Fernandes Date: Thu, 11 Apr 2024 14:24:35 +0200 Subject: [PATCH 48/49] missing } --- .github/workflows/default-tests.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/default-tests.yml b/.github/workflows/default-tests.yml index d1d05a27..68b90948 100644 --- a/.github/workflows/default-tests.yml +++ b/.github/workflows/default-tests.yml @@ -22,7 +22,7 @@ jobs: environment-name: TEST init-shell: bash create-args: >- - python=${{ matrix.python-version } pip + python=${{ matrix.python-version }} pip --file requirements.txt --file test_requirements.txt --channel conda-forge From e44eda1ab4140808d627689e8721c9e2621c8ef4 Mon Sep 17 00:00:00 2001 From: Filipe Fernandes Date: Thu, 11 Apr 2024 14:39:31 +0200 Subject: [PATCH 49/49] py38 compat --- compliance_checker/cf/util.py | 7 ++----- compliance_checker/cfutil.py | 4 ++-- compliance_checker/tests/conftest.py | 6 ++---- compliance_checker/tests/resources.py | 5 +++-- compliance_checker/tests/test_suite.py | 25 +++++++++---------------- requirements.txt | 1 + 6 files changed, 19 insertions(+), 29 deletions(-) diff --git a/compliance_checker/cf/util.py b/compliance_checker/cf/util.py index 64a21c1a..6f100653 100644 --- a/compliance_checker/cf/util.py +++ b/compliance_checker/cf/util.py @@ -1,4 +1,3 @@ -import importlib.resources import itertools import os import sys @@ -6,6 +5,7 @@ import requests from cf_units import Unit +from importlib_resources import files from lxml import etree from netCDF4 import Dataset @@ -284,10 +284,7 @@ def download_cf_standard_name_table(version, location=None): if ( location is None ): # This case occurs when updating the packaged version from command line - location = ( - importlib.resources.files("compliance_checker") - / "data/cf-standard-name-table.xml" - ) + location = files("compliance_checker") / "data/cf-standard-name-table.xml" if version == "latest": url = "http://cfconventions.org/Data/cf-standard-names/current/src/cf-standard-name-table.xml" diff --git a/compliance_checker/cfutil.py b/compliance_checker/cfutil.py index 22195a22..245c6fd3 100644 --- a/compliance_checker/cfutil.py +++ b/compliance_checker/cfutil.py @@ -3,13 +3,13 @@ compliance_checker/cfutil.py """ import csv -import importlib.resources import re import warnings from collections import defaultdict from functools import lru_cache, partial from cf_units import Unit +from importlib_resources import files _UNITLESS_DB = None _SEA_NAMES = None @@ -128,7 +128,7 @@ def get_sea_names(): if _SEA_NAMES is None: buf = {} with open( - importlib.resources.files("compliance_checker") / "data/seanames.csv", + files("compliance_checker") / "data/seanames.csv", ) as f: reader = csv.reader(f) for code, sea_name in reader: diff --git a/compliance_checker/tests/conftest.py b/compliance_checker/tests/conftest.py index f4002bc5..2c662c16 100644 --- a/compliance_checker/tests/conftest.py +++ b/compliance_checker/tests/conftest.py @@ -1,9 +1,9 @@ -import importlib.resources import os import subprocess from itertools import chain import pytest +from importlib_resources import files from netCDF4 import Dataset from 
compliance_checker.cf import util @@ -26,9 +26,7 @@ def static_files(cdl_stem): Returns the Path to a valid nc dataset\n replaces the old STATIC_FILES dict """ - datadir = ( - importlib.resources.files("compliance_checker").joinpath("tests/data").resolve() - ) + datadir = files("compliance_checker").joinpath("tests/data").resolve() assert datadir.exists(), f"{datadir} not found" cdl_paths = glob_down(datadir, f"{cdl_stem}.cdl", 3) diff --git a/compliance_checker/tests/resources.py b/compliance_checker/tests/resources.py index 67786179..7bbca9d6 100644 --- a/compliance_checker/tests/resources.py +++ b/compliance_checker/tests/resources.py @@ -1,12 +1,13 @@ -import importlib.resources import subprocess +from importlib_resources import files + def get_filename(path): """ Returns the path to a valid dataset """ - filename = importlib.resources.files("compliance_checker") / path + filename = files("compliance_checker") / path nc_path = filename.with_suffix(".nc") if not nc_path.exists(): generate_dataset(filename, nc_path) diff --git a/compliance_checker/tests/test_suite.py b/compliance_checker/tests/test_suite.py index 0bbbc44d..8c8987df 100644 --- a/compliance_checker/tests/test_suite.py +++ b/compliance_checker/tests/test_suite.py @@ -1,30 +1,23 @@ -import importlib.resources import os import unittest from pathlib import Path import numpy as np +from importlib_resources import files from compliance_checker.acdd import ACDDBaseCheck from compliance_checker.base import BaseCheck, GenericFile, Result from compliance_checker.suite import CheckSuite static_files = { - "2dim": importlib.resources.files("compliance_checker") / "tests/data/2dim-grid.nc", - "bad_region": importlib.resources.files("compliance_checker") - / "tests/data/bad_region.nc", - "bad_data_type": importlib.resources.files("compliance_checker") - / "tests/data/bad_data_type.nc", - "test_cdl": importlib.resources.files("compliance_checker") - / "tests/data/test_cdl.cdl", - "test_cdl_nc": importlib.resources.files("compliance_checker") - / "tests/data/test_cdl_nc_file.nc", - "empty": importlib.resources.files("compliance_checker") - / "tests/data/non-comp/empty.file", - "ru07": importlib.resources.files("compliance_checker") - / "tests/data/ru07-20130824T170228_rt0.nc", - "netCDF4": importlib.resources.files("compliance_checker") - / "tests/data/test_cdl_nc4_file.cdl", + "2dim": files("compliance_checker") / "tests/data/2dim-grid.nc", + "bad_region": files("compliance_checker") / "tests/data/bad_region.nc", + "bad_data_type": files("compliance_checker") / "tests/data/bad_data_type.nc", + "test_cdl": files("compliance_checker") / "tests/data/test_cdl.cdl", + "test_cdl_nc": files("compliance_checker") / "tests/data/test_cdl_nc_file.nc", + "empty": files("compliance_checker") / "tests/data/non-comp/empty.file", + "ru07": files("compliance_checker") / "tests/data/ru07-20130824T170228_rt0.nc", + "netCDF4": files("compliance_checker") / "tests/data/test_cdl_nc4_file.cdl", } diff --git a/requirements.txt b/requirements.txt index 5c04ef39..6277c633 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,5 +1,6 @@ cf-units>=2 cftime>=1.1.0 +importlib-resources # drop this when dropping Python 3.8 isodate>=0.6.1 jinja2>=2.7.3 lxml>=3.2.1
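The closing py38-compat patch swaps the stdlib importlib.resources for the importlib_resources backport because files() only landed in the standard library in Python 3.9, while the project still supports 3.8 (hence the "drop this when dropping Python 3.8" note in requirements.txt). The series imports the backport unconditionally; a version-gated variant, sketched here purely for illustration, would make the extra dependency unnecessary on newer interpreters:

import sys

if sys.version_info >= (3, 9):
    # files() is in the stdlib from Python 3.9 onward
    from importlib.resources import files
else:
    # Python 3.8 falls back to the PyPI backport pinned in requirements.txt
    from importlib_resources import files

datadir = files("compliance_checker") / "tests/data"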