Skip to content

Commit

Permalink
Merge pull request #161 from satra/patch-1
Browse files Browse the repository at this point in the history
add compressor option to write image
  • Loading branch information
sbesson authored Feb 8, 2022
2 parents 19cebd5 + 0d9e868 commit e30d533
Show file tree
Hide file tree
Showing 3 changed files with 39 additions and 3 deletions.
2 changes: 1 addition & 1 deletion .isort.cfg
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
[settings]
known_third_party = dask,numpy,pytest,scipy,setuptools,skimage,zarr
known_third_party = dask,numcodecs,numpy,pytest,scipy,setuptools,skimage,zarr
multi_line_output = 3
include_trailing_comma = True
force_grid_wrap = 0
Expand Down
23 changes: 21 additions & 2 deletions ome_zarr/writer.py
Original file line number Diff line number Diff line change
Expand Up @@ -174,6 +174,7 @@ def write_multiscale(
fmt: Format = CurrentFormat(),
axes: Union[str, List[str], List[Dict[str, str]]] = None,
coordinate_transformations: List[List[Dict[str, Any]]] = None,
storage_options: Union[JSONDict, List[JSONDict]] = None,
) -> None:
"""
Write a pyramid with multiscale metadata to disk.
Expand All @@ -198,15 +199,27 @@ def write_multiscale(
For each path, we have a List of transformation Dicts.
            Each list of dicts is added to the corresponding dataset in order
            and must include a 'scale' transform.
storage_options: dict or list of dict
Options to be passed on to the storage backend. A list would need to match
the number of datasets in a multiresolution pyramid. One can provide
            different chunk sizes for each level of a pyramid using this option.
"""

dims = len(pyramid[0].shape)
axes = _get_valid_axes(dims, axes, fmt)

datasets: List[dict] = []
for path, data in enumerate(pyramid):
# TODO: chunks here could be different per layer
group.create_dataset(str(path), data=data, chunks=chunks)
options = {}
if storage_options:
options = (
storage_options
if not isinstance(storage_options, list)
else storage_options[path]
)
if "chunks" not in options:
options["chunks"] = chunks
group.create_dataset(str(path), data=data, **options)
datasets.append({"path": str(path)})

if coordinate_transformations is None:
Expand Down Expand Up @@ -357,6 +370,7 @@ def write_image(
fmt: Format = CurrentFormat(),
axes: Union[str, List[str], List[Dict[str, str]]] = None,
coordinate_transformations: List[List[Dict[str, Any]]] = None,
storage_options: Union[JSONDict, List[JSONDict]] = None,
**metadata: JSONDict,
) -> None:
"""Writes an image to the zarr store according to ome-zarr specification
Expand Down Expand Up @@ -387,6 +401,10 @@ def write_image(
  coordinate_transformations: 2D list of dict
For each resolution, we have a List of transformation Dicts (not validated).
    Each list of dicts is added to the corresponding dataset in order.
storage_options: dict or list of dict
Options to be passed on to the storage backend. A list would need to match
the number of datasets in a multiresolution pyramid. One can provide
    different chunk sizes for each level of a pyramid using this option.
"""

if image.ndim > 5:
Expand Down Expand Up @@ -423,6 +441,7 @@ def write_image(
fmt=fmt,
axes=axes,
coordinate_transformations=coordinate_transformations,
storage_options=storage_options,
)
group.attrs.update(metadata)

Expand Down
17 changes: 17 additions & 0 deletions tests/test_writer.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
import numpy as np
import pytest
import zarr
from numcodecs import Blosc

from ome_zarr.format import CurrentFormat, FormatV01, FormatV02, FormatV03, FormatV04
from ome_zarr.io import parse_url
Expand Down Expand Up @@ -121,6 +122,22 @@ def test_write_image_current(self):
for value in transfs[0]["scale"]:
assert value >= 1

def test_write_image_compressed(self):
    """Passing a Blosc compressor via ``storage_options`` must end up as
    the compressor of the stored full-resolution zarr array."""
    data = self.create_data((64, 64, 64))
    codec = Blosc(cname="zstd", clevel=5, shuffle=Blosc.SHUFFLE)
    write_image(
        data, self.group, axes="zyx", storage_options={"compressor": codec}
    )
    # Re-open the store from disk to check what was actually persisted.
    stored = zarr.open(f"{self.path}/test")
    expected = {
        "id": "blosc",
        "cname": "zstd",
        "clevel": 5,
        "shuffle": Blosc.SHUFFLE,
        "blocksize": 0,
    }
    assert stored["0"].compressor.get_config() == expected

def test_validate_coordinate_transforms(self):

fmt = FormatV04()
Expand Down

0 comments on commit e30d533

Please sign in to comment.