Skip to content

Commit

Permalink
Make grib codec a compressor since its bytes to array
Browse files Browse the repository at this point in the history
  • Loading branch information
mpiannucci committed Oct 23, 2024
1 parent 187ced2 commit 690ed21
Show file tree
Hide file tree
Showing 2 changed files with 29 additions and 29 deletions.
4 changes: 2 additions & 2 deletions kerchunk/grib2.py
Original file line number Diff line number Diff line change
Expand Up @@ -97,8 +97,8 @@ def _store_array(store, z, data, var, inline_threshold, offset, size, attr):
chunks=shape,
dtype=data.dtype,
fill_value=attr.get("missingValue", None),
filters=[GRIBCodec(var=var, dtype=str(data.dtype))],
compressor=None,
filters=[],
compressor=GRIBCodec(var=var, dtype=str(data.dtype)),
)
store[f"{var}/" + ".".join(["0"] * len(shape))] = ["{{u}}", offset, size]
d.attrs.update(attr)
Expand Down
54 changes: 27 additions & 27 deletions kerchunk/tests/test_grib.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@
import pandas as pd
import pytest
import xarray as xr
import datatree
#import datatree
import zarr
import ujson
from kerchunk.grib2 import (
Expand Down Expand Up @@ -263,22 +263,22 @@ def test_hrrr_sfcf_grib_tree():
assert zg.u.instant.isobaricInhPa.time.shape == (1,)


def test_hrrr_sfcf_grib_datatree():
fpath = os.path.join(here, "hrrr.wrfsfcf.subset.json")
with open(fpath, "rb") as fobj:
scanned_msgs = ujson.load(fobj)
merged = grib_tree(scanned_msgs)
dt = datatree.open_datatree(
fsspec.filesystem("reference", fo=merged).get_mapper(""),
engine="zarr",
consolidated=False,
)
# Assert a few things... but if it loads we are mostly done.
np.testing.assert_array_equal(
dt.u.instant.heightAboveGround.step.values[:],
np.array([0, 3600 * 10**9], dtype="timedelta64[ns]"),
)
assert dt.u.attrs == dict(name="U component of wind")
# def test_hrrr_sfcf_grib_datatree():
# fpath = os.path.join(here, "hrrr.wrfsfcf.subset.json")
# with open(fpath, "rb") as fobj:
# scanned_msgs = ujson.load(fobj)
# merged = grib_tree(scanned_msgs)
# dt = datatree.open_datatree(
# fsspec.filesystem("reference", fo=merged).get_mapper(""),
# engine="zarr",
# consolidated=False,
# )
# # Assert a few things... but if it loads we are mostly done.
# np.testing.assert_array_equal(
# dt.u.instant.heightAboveGround.step.values[:],
# np.array([0, 3600 * 10**9], dtype="timedelta64[ns]"),
# )
# assert dt.u.attrs == dict(name="U component of wind")


def test_parse_grib_idx_invalid_url():
Expand Down Expand Up @@ -342,17 +342,17 @@ def test_parse_grib_idx_content(idx_url, storage_options):
assert idx_df.iloc[message_no]["length"] == output[message_no]["refs"][variable][2]


@pytest.fixture
def zarr_tree_and_datatree_instance():
fn = os.path.join(here, "gfs.t00z.pgrb2.0p25.f006.test-limit-100")
tree_store = tree_store = grib_tree(scan_grib(fn))
dt_instance = datatree.open_datatree(
fsspec.filesystem("reference", fo=tree_store).get_mapper(""),
engine="zarr",
consolidated=False,
)
# @pytest.fixture
# def zarr_tree_and_datatree_instance():
# fn = os.path.join(here, "gfs.t00z.pgrb2.0p25.f006.test-limit-100")
# tree_store = tree_store = grib_tree(scan_grib(fn))
# dt_instance = datatree.open_datatree(
# fsspec.filesystem("reference", fo=tree_store).get_mapper(""),
# engine="zarr",
# consolidated=False,
# )

return tree_store, dt_instance, fn
# return tree_store, dt_instance, fn


def test_extract_dataset_chunk_index(zarr_tree_and_datatree_instance):
Expand Down

0 comments on commit 690ed21

Please sign in to comment.