diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index cec164d5..51b4af30 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,7 +1,7 @@
 repos:

   - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v4.4.0
+    rev: v4.5.0
     hooks:
       - id: trailing-whitespace
      - id: end-of-file-fixer
@@ -13,13 +13,13 @@ repos:
         exclude: "docs/"

   - repo: https://github.com/psf/black
-    rev: 22.12.0
+    rev: 23.11.0
     hooks:
       - id: black
         args: ["--line-length", "100"]

   - repo: https://github.com/PyCQA/flake8
-    rev: 6.0.0
+    rev: 6.1.0
     hooks:
       - id: flake8
         exclude: pangeo_forge_recipes/recipes
@@ -30,7 +30,7 @@ repos:
       - id: seed-isort-config

   - repo: https://github.com/pre-commit/mirrors-mypy
-    rev: 'v0.991'
+    rev: 'v1.7.0'
     hooks:
       - id: mypy
         exclude: tests,pangeo_forge_recipes/recipes
@@ -42,6 +42,6 @@ repos:
         args: ["--profile", "black"]

   - repo: https://github.com/rstcheck/rstcheck
-    rev: v6.1.1
+    rev: v6.2.0
     hooks:
       - id: rstcheck
diff --git a/pangeo_forge_recipes/transforms.py b/pangeo_forge_recipes/transforms.py
index 3f4c390a..49bbd0c8 100644
--- a/pangeo_forge_recipes/transforms.py
+++ b/pangeo_forge_recipes/transforms.py
@@ -379,7 +379,6 @@ def expand(self, pcoll: beam.PCollection) -> beam.PCollection:

 @dataclass
 class StoreDatasetFragments(beam.PTransform):
-
     target_store: beam.PCollection  # side input

     def expand(self, pcoll: beam.PCollection) -> beam.PCollection:
diff --git a/pangeo_forge_recipes/writers.py b/pangeo_forge_recipes/writers.py
index 457495fe..10cabbca 100644
--- a/pangeo_forge_recipes/writers.py
+++ b/pangeo_forge_recipes/writers.py
@@ -124,7 +124,6 @@ def write_combined_reference(
         f.write(ujson.dumps(multi_kerchunk).encode())

     elif file_ext == ".parquet":
-
         # Creates empty parquet store to be written to
         if full_target.exists(output_file_name):
             full_target.rm(output_file_name, recursive=True)
diff --git a/tests/conftest.py b/tests/conftest.py
index bf0238df..6fc781df 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -154,7 +154,6 @@ def get_open_port():


 def start_http_server(paths, request, username=None, password=None, required_query_string=None):
-
     first_path = paths[0]
     # assume that all files are in the same directory
     basedir = os.path.dirname(first_path)
diff --git a/tests/http_auth_server.py b/tests/http_auth_server.py
index c82fc650..50209afd 100644
--- a/tests/http_auth_server.py
+++ b/tests/http_auth_server.py
@@ -13,7 +13,6 @@
 @click.option("--password")
 @click.option("--required-query-string")
 def serve_forever(address, port, username, password, required_query_string):
-
     port = int(port)

     class Handler(http.server.SimpleHTTPRequestHandler):
diff --git a/tests/test_aggregation.py b/tests/test_aggregation.py
index eceaffb0..4fe5f7b3 100644
--- a/tests/test_aggregation.py
+++ b/tests/test_aggregation.py
@@ -86,7 +86,6 @@ def test_schema_to_template_ds_cftime():


 def test_schema_to_template_ds_attrs():
-
     attrs = {"test_attr_key": "test_attr_value"}
     ds = xr.decode_cf(
         xr.DataArray(
diff --git a/tests/test_combiners.py b/tests/test_combiners.py
index 350e5c35..b469fc3a 100644
--- a/tests/test_combiners.py
+++ b/tests/test_combiners.py
@@ -124,12 +124,12 @@ def test_NestDim(schema_pcoll_concat_merge, pipeline):
     pattern, _, pcoll = schema_pcoll_concat_merge
     pattern_merge_only = FilePattern(
         pattern.format_function,
-        *[cdim for cdim in pattern.combine_dims if cdim.operation == CombineOp.MERGE]
+        *[cdim for cdim in pattern.combine_dims if cdim.operation == CombineOp.MERGE],
     )
     merge_only_indexes = list(pattern_merge_only)
     pattern_concat_only = FilePattern(
         pattern.format_function,
-        *[cdim for cdim in pattern.combine_dims if cdim.operation == CombineOp.CONCAT]
+        *[cdim for cdim in pattern.combine_dims if cdim.operation == CombineOp.CONCAT],
     )
     concat_only_indexes = list(pattern_concat_only)

diff --git a/tests/test_openers.py b/tests/test_openers.py
index a4f0435f..9f0b6d3e 100644
--- a/tests/test_openers.py
+++ b/tests/test_openers.py
@@ -161,7 +161,6 @@ def test_direct_open_with_xarray(public_url_and_type, load, xarray_open_kwargs):

 def is_valid_inline_threshold():
     def _is_valid_inline_threshold(references):
-
         assert isinstance(references[0][0]["refs"]["lat/0"], list)

     return _is_valid_inline_threshold
diff --git a/tests/test_patterns.py b/tests/test_patterns.py
index 2fe81df7..588cfd8b 100644
--- a/tests/test_patterns.py
+++ b/tests/test_patterns.py
@@ -87,7 +87,6 @@ def test_pattern_from_file_sequence():

 @pytest.mark.parametrize("pickle", [False, True])
 def test_file_pattern_concat_merge(runtime_secrets, pickle, concat_merge_pattern_with_kwargs):
-
     fp, times, varnames, format_function, kwargs = concat_merge_pattern_with_kwargs

     if runtime_secrets:
@@ -155,7 +154,6 @@ def test_incompatible_kwargs():

 @pytest.mark.parametrize("nkeep", [1, 2])
 def test_prune(nkeep, concat_merge_pattern_with_kwargs, runtime_secrets):
-
     fp = concat_merge_pattern_with_kwargs[0]

     if runtime_secrets:
@@ -190,7 +188,6 @@ def get_kwargs(file_pattern):

 @pytest.mark.parametrize("file_type_value", [ft.value for ft in list(FileType)] + ["unsupported"])
 def test_setting_file_types(file_type_value):
-
     file_type_kwargs = {"file_type": file_type_value}

     if not file_type_value == "unsupported":
diff --git a/tests/test_rechunking.py b/tests/test_rechunking.py
index 8c43ed39..f3d72a82 100644
--- a/tests/test_rechunking.py
+++ b/tests/test_rechunking.py
@@ -226,7 +226,6 @@ def test_combine_fragments_multidim(time_chunk, lat_chunk):


 def test_combine_fragments_errors():
-
     ds = make_ds(nt=1)
     group = (("time", 0),)  # not actually used

diff --git a/tests/test_serialization.py b/tests/test_serialization.py
index eeeb1ae8..2f4869f1 100644
--- a/tests/test_serialization.py
+++ b/tests/test_serialization.py
@@ -42,7 +42,6 @@ def base_pattern(end_date):


 def get_new_pattern_with_next_url(end_date, nitems_per_file):
-
     fmt = "%Y-%m-%d"

     def increment_end_date(ndays):
diff --git a/tests/test_transforms.py b/tests/test_transforms.py
index e40f77c1..0de43e1c 100644
--- a/tests/test_transforms.py
+++ b/tests/test_transforms.py
@@ -152,7 +152,6 @@ def test_OpenWithKerchunk_direct(pattern_direct, pipeline):

 @pytest.mark.parametrize("target_chunks", [{}, {"time": 1}, {"time": 2}, {"time": 2, "lon": 9}])
 def test_PrepareZarrTarget(pipeline, tmp_target_url, target_chunks):
-
     ds = make_ds()
     schema = dataset_to_schema(ds)

diff --git a/tests/test_writers.py b/tests/test_writers.py
index f5c68649..2f452725 100644
--- a/tests/test_writers.py
+++ b/tests/test_writers.py
@@ -16,7 +16,6 @@ def temp_store(tmp_path):


 def test_store_dataset_fragment(temp_store):
-
     ds = make_ds(non_dim_coords=True)
     schema = ds.to_dict(data=False, encoding=True)
     schema["chunks"] = {}