Additional system tests (#16)
* Add shape system test

* Re-add shapes test

* Use unit test data in system tests for shape

* Parameterize info tests

* Parse byte strings in metadata

* Add tree system tests

* Add slice system tests

* Make test client a fixture

* Fix formatting
callumforrester committed Jun 23, 2022
1 parent d039cea commit 76c6409
Showing 7 changed files with 111 additions and 29 deletions.
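The "Parse byte strings in metadata" item in the commit message above reflects how h5py hands back fixed-length string attributes as bytes, which the service's JSON responses cannot return as-is. A minimal sketch of that behaviour, assuming h5py's default string handling; "example.h5" is a placeholder path, not part of this repository:

import h5py

# Writing bytes creates a fixed-length string attribute, as many HDF5 writers do.
with h5py.File("example.h5", "w") as f:
    f.attrs["NX_class"] = b"NXentry"

# Reading it back yields bytes, which json.dumps() cannot serialise directly;
# hence the decoding step added to the metadata task in this commit.
with h5py.File("example.h5", "r") as f:
    value = f.attrs["NX_class"]  # b'NXentry'
    assert value.decode("utf-8") == "NXentry"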
17 changes: 16 additions & 1 deletion src/hdf5_reader_service/tasks/metadata.py
@@ -1,4 +1,7 @@
from typing import Any, Mapping

import h5py
import numpy as np

from hdf5_reader_service.model import (
ByteOrder,
@@ -23,7 +26,7 @@ def fetch_metadata(path: str, subpath: str, swmr: bool) -> MetadataNode:

def metadata(node: h5py.HLObject) -> MetadataNode:
name = node.name
attributes = dict(node.attrs)
attributes = _without_bytes(dict(node.attrs))

data = MetadataNode(name=name, attributes=attributes)

@@ -43,3 +46,15 @@ def metadata(node: h5py.HLObject) -> MetadataNode:
data.structure = structure

return data


def _without_bytes(mapping: Mapping[str, Any]) -> Mapping[str, Any]:
def handle_value(value: Any) -> Any:
if isinstance(value, dict):
return _without_bytes(value)
elif isinstance(value, bytes) or isinstance(value, np.bytes_):
return value.decode("utf-8")
else:
return value

return {key: handle_value(value) for key, value in mapping.items()}
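A hypothetical usage sketch of the _without_bytes helper added above (the attribute values are made up, and the private helper is imported here only for illustration): nested dictionaries are decoded recursively, bytes and numpy.bytes_ values become str, and everything else passes through unchanged.

from hdf5_reader_service.tasks.metadata import _without_bytes

raw_attrs = {
    "NX_class": b"NXentry",      # numpy.bytes_ values (a bytes subclass) decode the same way
    "default": b"DIFFRACTION",
    "nested": {"units": b"ms"},  # nested dicts are handled recursively
    "count": 42,                 # non-bytes values pass through unchanged
}

assert _without_bytes(raw_attrs) == {
    "NX_class": "NXentry",
    "default": "DIFFRACTION",
    "nested": {"units": "ms"},
    "count": 42,
}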
Empty file added tests/__init__.py
4 changes: 2 additions & 2 deletions tests/tasks/test_metadata.py
@@ -15,11 +15,11 @@
"/": MetadataNode(
name="/",
attributes={
"file_name": b"/scratch/ryi58813/gda-master-tiled/gda_data_non_live/2022/0-0/p45-104.nxs"
"file_name": "/scratch/ryi58813/gda-master-tiled/gda_data_non_live/2022/0-0/p45-104.nxs" # noqa: E501
},
),
"/entry": MetadataNode(
name="/entry", attributes={"NX_class": b"NXentry", "default": b"DIFFRACTION"}
name="/entry", attributes={"NX_class": "NXentry", "default": "DIFFRACTION"}
),
"/entry/DIFFRACTION/data": MetadataNode(
name="/entry/data",
3 changes: 2 additions & 1 deletion tests/tasks/test_shapes.py
@@ -1,4 +1,5 @@
from pathlib import Path
from typing import Mapping

import pytest

@@ -11,7 +12,7 @@
)
from hdf5_reader_service.tasks import fetch_shapes

TEST_CASES = {
TEST_CASES: Mapping[str, DataTree[ShapeMetadata]] = {
"/entry/sample/name": DataTree(
name="name", valid=True, node=ValidNode(contents=ShapeMetadata(), subnodes=[])
),
4 changes: 1 addition & 3 deletions tests/tasks/test_slice.py
@@ -1,9 +1,7 @@
import sys
import time
from pathlib import Path
from typing import Callable, Mapping
from typing import Mapping

import h5py as h5
import numpy as np
import pytest

25 changes: 12 additions & 13 deletions tests/tasks/test_tree.py
@@ -1,6 +1,6 @@
from pathlib import Path
from typing import Mapping

import numpy as np
import pytest

from hdf5_reader_service.model import (
@@ -12,12 +12,11 @@
InvalidNode,
InvalidNodeReason,
MetadataNode,
ShapeMetadata,
ValidNode,
)
from hdf5_reader_service.tasks import fetch_tree

TEST_CASES = {
TEST_CASES: Mapping[str, DataTree[MetadataNode]] = {
"/entry/sample/name": DataTree(
name="name",
valid=True,
@@ -41,7 +40,7 @@
node=ValidNode(
contents=MetadataNode(
name="/entry/sample",
attributes={"NX_class": b"NXsample"},
attributes={"NX_class": "NXsample"},
structure=None,
),
subnodes=[
@@ -92,7 +91,7 @@
node=ValidNode(
contents=MetadataNode(
name="/entry/diamond_scan",
attributes={"NX_class": b"NXcollection"},
attributes={"NX_class": "NXcollection"},
structure=None,
),
subnodes=[
@@ -103,8 +102,8 @@
contents=MetadataNode(
name="/entry/diamond_scan/duration",
attributes={
"target": b"/entry/diamond_scan/duration",
"units": b"ms",
"target": "/entry/diamond_scan/duration",
"units": "ms",
},
structure=DatasetStructure(
macro=DatasetMacroStructure(shape=(), chunks=None),
@@ -122,7 +121,7 @@
node=ValidNode(
contents=MetadataNode(
name="/entry/diamond_scan/end_time",
attributes={"target": b"/entry/diamond_scan/end_time"},
attributes={"target": "/entry/diamond_scan/end_time"},
structure=DatasetStructure(
macro=DatasetMacroStructure(shape=(), chunks=None),
micro=DatasetMicroStructure(
@@ -141,7 +140,7 @@
node=ValidNode(
contents=MetadataNode(
name="/entry/diamond_scan/keys",
attributes={"NX_class": b"NXcollection"},
attributes={"NX_class": "NXcollection"},
structure=None,
),
subnodes=[
@@ -181,7 +180,7 @@
node=ValidNode(
contents=MetadataNode(
name="/entry/diamond_scan/scan_dead_time",
attributes={"units": b"ms"},
attributes={"units": "ms"},
structure=DatasetStructure(
macro=DatasetMacroStructure(shape=(), chunks=None),
micro=DatasetMicroStructure(
@@ -217,7 +216,7 @@
node=ValidNode(
contents=MetadataNode(
name="/entry/diamond_scan/scan_estimated_duration",
attributes={"units": b"ms"},
attributes={"units": "ms"},
structure=DatasetStructure(
macro=DatasetMacroStructure(shape=(), chunks=None),
micro=DatasetMicroStructure(
@@ -306,7 +305,7 @@
node=ValidNode(
contents=MetadataNode(
name="/entry/diamond_scan/scan_shape",
attributes={"target": b"/entry/diamond_scan/scan_shape"},
attributes={"target": "/entry/diamond_scan/scan_shape"},
structure=DatasetStructure(
macro=DatasetMacroStructure(shape=(2,), chunks=None),
micro=DatasetMicroStructure(
@@ -323,7 +322,7 @@
node=ValidNode(
contents=MetadataNode(
name="/entry/diamond_scan/start_time",
attributes={"target": b"/entry/diamond_scan/start_time"},
attributes={"target": "/entry/diamond_scan/start_time"},
structure=DatasetStructure(
macro=DatasetMacroStructure(shape=(), chunks=None),
micro=DatasetMicroStructure(
87 changes: 78 additions & 9 deletions tests/test_system.py
@@ -1,31 +1,100 @@
import os
from pathlib import Path

import numpy as np
import pytest
from fastapi.testclient import TestClient

from hdf5_reader_service.main import app
from hdf5_reader_service.model import (
DataTree,
MetadataNode,
NodeChildren,
ShapeMetadata,
)
from tests.tasks.test_metadata import TEST_CASES as METADATA_TEST_CASES
from tests.tasks.test_search import TEST_CASES as SEARCH_TEST_CASES
from tests.tasks.test_shapes import TEST_CASES as SHAPE_TEST_CASES
from tests.tasks.test_slice import TEST_CASES as SLICE_TEST_CASES
from tests.tasks.test_tree import TEST_CASES as TREE_TEST_CASES

client = TestClient(app)

@pytest.fixture
def client() -> TestClient:
return TestClient(app)

def test_read_main():

def test_read_main(
client: TestClient,
):
response = client.get("/")
assert response.status_code == 200
assert response.json() == {
"INFO": "Please provide a path to the HDF5 file, e.g. '/file/<path>'."
}


def test_read_tree(test_data_path: Path):
response = client.get(f"/tree/?path={test_data_path}")
@pytest.mark.parametrize("subpath,shape", SHAPE_TEST_CASES.items())
def test_read_shapes(
client: TestClient,
test_data_path: Path,
subpath: str,
shape: DataTree[ShapeMetadata],
):
response = client.get(
"/shapes/", params={"path": test_data_path, "subpath": subpath}
)
assert response.status_code == 200
actual_shape = DataTree[ShapeMetadata].parse_obj(response.json())
assert actual_shape == shape


@pytest.mark.parametrize("subpath,tree", TREE_TEST_CASES.items())
def test_read_tree(
client: TestClient, test_data_path: Path, subpath: str, tree: DataTree[MetadataNode]
):
response = client.get("/tree/", params={"path": test_data_path, "subpath": subpath})
assert response.status_code == 200
actual_tree = DataTree[MetadataNode].parse_obj(response.json())
assert actual_tree == tree


@pytest.mark.parametrize("subpath,metadata", METADATA_TEST_CASES.items())
def test_read_info(
client: TestClient, test_data_path: Path, subpath: str, metadata: MetadataNode
):
response = client.get("/info/", params={"path": test_data_path, "subpath": subpath})
assert response.status_code == 200
actual_metadata = MetadataNode.parse_obj(response.json())
assert actual_metadata == metadata


def test_read_info(test_data_path: Path):
response = client.get(f"/info/?path={test_data_path}")
@pytest.mark.parametrize("subpath,children", SEARCH_TEST_CASES.items())
def test_read_search(
client: TestClient, test_data_path: Path, subpath: str, children: NodeChildren
):
response = client.get(
"/search/", params={"path": test_data_path, "subpath": subpath}
)
assert response.status_code == 200
actual_children = NodeChildren.parse_obj(response.json())
assert actual_children == children


def test_read_search(test_data_path: Path):
response = client.get(f"/search/?path={test_data_path}")
@pytest.mark.parametrize("slice_info,expected_array", SLICE_TEST_CASES.items())
def test_read_slice(
client: TestClient,
test_data_path: Path,
slice_info: str,
expected_array: np.ndarray,
):
response = client.get(
"/slice/",
params={
"path": test_data_path,
"subpath": "/entry/DIFFRACTION/data",
"slice_info": slice_info,
},
)
assert response.status_code == 200
data_slice = np.array(response.json())
np.testing.assert_array_equal(data_slice, expected_array)
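
The system tests above combine two patterns: a TestClient provided as a pytest fixture, and parametrisation over a mapping's .items() so each (subpath, expected) pair becomes its own generated test case. A self-contained sketch of the same pattern against a stand-in app (the route, fixture, and case data here are illustrative, not part of this repository):

from typing import Mapping

import pytest
from fastapi import FastAPI
from fastapi.testclient import TestClient

app = FastAPI()


@app.get("/")
def index():
    return {"INFO": "hello"}


@pytest.fixture
def client() -> TestClient:
    # A fresh client per test keeps the tests independent of one another.
    return TestClient(app)


CASES: Mapping[str, str] = {"INFO": "hello"}


# .items() yields (key, expected) tuples, producing one test per entry.
@pytest.mark.parametrize("key,expected", CASES.items())
def test_index(client: TestClient, key: str, expected: str) -> None:
    response = client.get("/")
    assert response.status_code == 200
    assert response.json()[key] == expected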
