[pre-commit.ci] pre-commit autoupdate #543

Closed · wants to merge 2 commits
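The diff below bumps the pinned rev of each hook in .pre-commit-config.yaml; the remaining files apparently change only because the newer tools reformat them. As a quick way to see which revisions a config pins, here is a small illustrative helper (not part of the PR; it assumes PyYAML is installed):

# Illustrative only: print each hook repo and its pinned revision from a
# pre-commit config. Assumes PyYAML is installed; not code from this PR.
import yaml

with open(".pre-commit-config.yaml") as f:
    config = yaml.safe_load(f)

for repo in config.get("repos", []):
    print(repo.get("repo", "local"), repo.get("rev", "<unpinned>"))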
10 changes: 5 additions & 5 deletions .pre-commit-config.yaml
@@ -1,7 +1,7 @@
 repos:
 
   - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v4.4.0
+    rev: v4.5.0
     hooks:
       - id: trailing-whitespace
       - id: end-of-file-fixer
@@ -13,13 +13,13 @@ repos:
         exclude: "docs/"
 
   - repo: https://github.com/psf/black
-    rev: 22.12.0
+    rev: 23.11.0
     hooks:
       - id: black
         args: ["--line-length", "100"]
 
   - repo: https://github.com/PyCQA/flake8
-    rev: 6.0.0
+    rev: 6.1.0
     hooks:
       - id: flake8
         exclude: pangeo_forge_recipes/recipes
@@ -30,7 +30,7 @@ repos:
       - id: seed-isort-config
 
   - repo: https://github.com/pre-commit/mirrors-mypy
-    rev: 'v0.991'
+    rev: 'v1.7.0'
     hooks:
       - id: mypy
         exclude: tests,pangeo_forge_recipes/recipes
@@ -42,6 +42,6 @@ repos:
         args: ["--profile", "black"]
 
   - repo: https://github.com/rstcheck/rstcheck
-    rev: v6.1.1
+    rev: v6.2.0
     hooks:
       - id: rstcheck
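Every Python file touched below changes in the same mechanical way: the blank line that used to sit directly after a def/class signature or block header is deleted, which is consistent with the formatting the newer black release enforces. A minimal before/after sketch (hypothetical code, not from this repository):

# Hypothetical example of the blank-line rule applied throughout this PR.
# Before, the body started one blank line below the signature:
#
# def make_schema():
#
#     return {"chunks": {}}
#
# After reformatting, the body sits flush against the signature:
def make_schema():
    return {"chunks": {}}


if __name__ == "__main__":
    print(make_schema())  # {'chunks': {}}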
1 change: 0 additions & 1 deletion pangeo_forge_recipes/transforms.py
@@ -379,7 +379,6 @@ def expand(self, pcoll: beam.PCollection) -> beam.PCollection:
 
 @dataclass
 class StoreDatasetFragments(beam.PTransform):
-
     target_store: beam.PCollection  # side input
 
     def expand(self, pcoll: beam.PCollection) -> beam.PCollection:
1 change: 0 additions & 1 deletion pangeo_forge_recipes/writers.py
@@ -124,7 +124,6 @@ def write_combined_reference(
             f.write(ujson.dumps(multi_kerchunk).encode())
 
     elif file_ext == ".parquet":
-
         # Creates empty parquet store to be written to
         if full_target.exists(output_file_name):
             full_target.rm(output_file_name, recursive=True)
1 change: 0 additions & 1 deletion tests/conftest.py
@@ -154,7 +154,6 @@ def get_open_port():
 
 
 def start_http_server(paths, request, username=None, password=None, required_query_string=None):
-
     first_path = paths[0]
     # assume that all files are in the same directory
     basedir = os.path.dirname(first_path)
1 change: 0 additions & 1 deletion tests/http_auth_server.py
@@ -13,7 +13,6 @@
 @click.option("--password")
 @click.option("--required-query-string")
 def serve_forever(address, port, username, password, required_query_string):
-
     port = int(port)
 
     class Handler(http.server.SimpleHTTPRequestHandler):
1 change: 0 additions & 1 deletion tests/test_aggregation.py
@@ -86,7 +86,6 @@ def test_schema_to_template_ds_cftime():
 
 
 def test_schema_to_template_ds_attrs():
-
     attrs = {"test_attr_key": "test_attr_value"}
     ds = xr.decode_cf(
         xr.DataArray(
4 changes: 2 additions & 2 deletions tests/test_combiners.py
@@ -124,12 +124,12 @@ def test_NestDim(schema_pcoll_concat_merge, pipeline):
     pattern, _, pcoll = schema_pcoll_concat_merge
     pattern_merge_only = FilePattern(
         pattern.format_function,
-        *[cdim for cdim in pattern.combine_dims if cdim.operation == CombineOp.MERGE]
+        *[cdim for cdim in pattern.combine_dims if cdim.operation == CombineOp.MERGE],
     )
     merge_only_indexes = list(pattern_merge_only)
     pattern_concat_only = FilePattern(
         pattern.format_function,
-        *[cdim for cdim in pattern.combine_dims if cdim.operation == CombineOp.CONCAT]
+        *[cdim for cdim in pattern.combine_dims if cdim.operation == CombineOp.CONCAT],
     )
     concat_only_indexes = list(pattern_concat_only)
 
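The two edits above are the other style change in this PR: when a call that unpacks a sequence with * is kept split across lines, the updated formatter appends a trailing comma after the unpacked argument. A small self-contained illustration (hypothetical code, not from this repository):

# Hypothetical illustration: a trailing comma after a *-unpacked argument in a
# call is valid Python 3 syntax, and the reformatting above adds it whenever
# the call stays multi-line.
values = [3, 1, 2]

highest = max(
    0,
    *[v * 10 for v in values],  # note the trailing comma
)

print(highest)  # 30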
1 change: 0 additions & 1 deletion tests/test_openers.py
@@ -161,7 +161,6 @@ def test_direct_open_with_xarray(public_url_and_type, load, xarray_open_kwargs):
 
 def is_valid_inline_threshold():
     def _is_valid_inline_threshold(references):
-
         assert isinstance(references[0][0]["refs"]["lat/0"], list)
 
     return _is_valid_inline_threshold
3 changes: 0 additions & 3 deletions tests/test_patterns.py
@@ -87,7 +87,6 @@ def test_pattern_from_file_sequence():
 
 @pytest.mark.parametrize("pickle", [False, True])
 def test_file_pattern_concat_merge(runtime_secrets, pickle, concat_merge_pattern_with_kwargs):
-
     fp, times, varnames, format_function, kwargs = concat_merge_pattern_with_kwargs
 
     if runtime_secrets:
@@ -155,7 +154,6 @@ def test_incompatible_kwargs():
 
 @pytest.mark.parametrize("nkeep", [1, 2])
 def test_prune(nkeep, concat_merge_pattern_with_kwargs, runtime_secrets):
-
     fp = concat_merge_pattern_with_kwargs[0]
 
     if runtime_secrets:
@@ -190,7 +188,6 @@ def get_kwargs(file_pattern):
 
 @pytest.mark.parametrize("file_type_value", [ft.value for ft in list(FileType)] + ["unsupported"])
 def test_setting_file_types(file_type_value):
-
     file_type_kwargs = {"file_type": file_type_value}
 
     if not file_type_value == "unsupported":
1 change: 0 additions & 1 deletion tests/test_rechunking.py
@@ -226,7 +226,6 @@ def test_combine_fragments_multidim(time_chunk, lat_chunk):
 
 
 def test_combine_fragments_errors():
-
     ds = make_ds(nt=1)
     group = (("time", 0),)  # not actually used
 
1 change: 0 additions & 1 deletion tests/test_serialization.py
@@ -42,7 +42,6 @@ def base_pattern(end_date):
 
 
 def get_new_pattern_with_next_url(end_date, nitems_per_file):
-
     fmt = "%Y-%m-%d"
 
     def increment_end_date(ndays):
1 change: 0 additions & 1 deletion tests/test_transforms.py
@@ -152,7 +152,6 @@ def test_OpenWithKerchunk_direct(pattern_direct, pipeline):
 
 @pytest.mark.parametrize("target_chunks", [{}, {"time": 1}, {"time": 2}, {"time": 2, "lon": 9}])
 def test_PrepareZarrTarget(pipeline, tmp_target_url, target_chunks):
-
     ds = make_ds()
     schema = dataset_to_schema(ds)
 
1 change: 0 additions & 1 deletion tests/test_writers.py
@@ -16,7 +16,6 @@ def temp_store(tmp_path):
 
 
 def test_store_dataset_fragment(temp_store):
-
     ds = make_ds(non_dim_coords=True)
     schema = ds.to_dict(data=False, encoding=True)
     schema["chunks"] = {}