Skip to content

Commit

Permalink
Issue #404/PR#481 test_datacube100 streamlining
Browse files Browse the repository at this point in the history
- use `dummy_backend` for more compact setup
- cover more combinations of server-side support, `auto_validate` and explicit `validate`
- leverage fixture parameterization more to avoid custom fixtures
  • Loading branch information
soxofaan committed Oct 18, 2023
1 parent 788a954 commit dd4804c
Show file tree
Hide file tree
Showing 5 changed files with 149 additions and 125 deletions.
2 changes: 1 addition & 1 deletion openeo/rest/connection.py
Original file line number Diff line number Diff line change
Expand Up @@ -1568,7 +1568,7 @@ def execute(
json=pg_with_metadata,
expected_status=200,
timeout=timeout or DEFAULT_TIMEOUT_SYNCHRONOUS_EXECUTE,
).json()
).json() # TODO: only do JSON decoding when mimetype is actually JSON?

def create_job(
self,
Expand Down
18 changes: 4 additions & 14 deletions tests/rest/datacube/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -67,29 +67,19 @@ def setup_collection_metadata(requests_mock, cid: str, bands: List[str]):
})


@pytest.fixture
def support_udp() -> bool:
"""Per-test overridable `build_capabilities_kwargs(udp=...)` value for connection fixtures"""
return False


@pytest.fixture
def connection(api_version, requests_mock) -> Connection:
def connection(api_version, requests_mock, api_capabilities) -> Connection:
"""Connection fixture to a backend of given version with some image collections."""
return _setup_connection(api_version, requests_mock)
return _setup_connection(api_version, requests_mock, build_capabilities_kwargs=api_capabilities)


@pytest.fixture
def con100(requests_mock, support_udp) -> Connection:
def con100(requests_mock, api_capabilities) -> Connection:
"""Connection fixture to a 1.0.0 backend with some image collections."""
return _setup_connection("1.0.0", requests_mock, build_capabilities_kwargs={"udp": support_udp})
return _setup_connection("1.0.0", requests_mock, build_capabilities_kwargs=api_capabilities)


@pytest.fixture
def connection_with_pgvalidation_datacube(api_version, requests_mock) -> Connection:
"""Connection fixture to a backend that supports validation of the process graph."""
return _setup_connection("1.0.0", requests_mock, build_capabilities_kwargs={"udp": support_udp, "validation": True})


@pytest.fixture
def s2cube(connection, api_version) -> DataCube:
Expand Down
190 changes: 123 additions & 67 deletions tests/rest/datacube/test_datacube.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,8 +16,10 @@
import shapely.geometry

from openeo.rest import BandMathException
from openeo.rest._testing import build_capabilities
from openeo.rest.connection import Connection
from openeo.rest.datacube import DataCube
from openeo.util import dict_no_none

from ... import load_json_resource
from .. import get_download_graph
Expand Down Expand Up @@ -812,94 +814,148 @@ def test_save_result_format_options_vs_execute_batch(elf, s2cube, get_create_job
}


class TestProcessGraphValidation:
JOB_ID = "j-123"
PROCESS_GRAPH_DICT = {"add1": {"process_id": "add", "arguments": {"x": 3, "y": 5}, "result": True}}
PROCESS_GRAPH_STRING = json.dumps(PROCESS_GRAPH_DICT)
class TestDataCubeValidation:
"""
Test (auto) validation of datacube execution with `download`, `execute`, ...
"""

@pytest.fixture
def cube_add(self, requests_mock, connection_with_pgvalidation_datacube: Connection) -> DataCube:
requests_mock.post(API_URL + "/result", content=self._post_result_handler_json)
return connection_with_pgvalidation_datacube.datacube_from_json(self.PROCESS_GRAPH_STRING)
_PG_S2 = {
"loadcollection1": {
"process_id": "load_collection",
"arguments": {"id": "S2", "spatial_extent": None, "temporal_extent": None},
"result": True,
},
}
_PG_S2_SAVE = {
"loadcollection1": {
"process_id": "load_collection",
"arguments": {"id": "S2", "spatial_extent": None, "temporal_extent": None},
},
"saveresult1": {
"process_id": "save_result",
"arguments": {"data": {"from_node": "loadcollection1"}, "format": "GTiff", "options": {}},
"result": True,
},
}

def _post_jobs_handler_json(self, response: requests.Request, context):
context.headers["OpenEO-Identifier"] = self.JOB_ID
return b""
@pytest.fixture(params=[False, True])
def auto_validate(self, request) -> bool:
    """Parametrized fixture: run each test once with ``auto_validate=False`` and once with ``auto_validate=True``."""
    return request.param

def _post_result_handler_json(self, response: requests.Request, context):
pg = response.json()["process"]["process_graph"]
assert pg == self.PROCESS_GRAPH_DICT
return b'{"answer": 8}'
@pytest.fixture
def connection(self, api_version, requests_mock, api_capabilities, auto_validate) -> Connection:
    """Connection fixture against a mocked backend with parametrized capabilities and ``auto_validate`` setting."""
    # Mock the capabilities document; `api_capabilities` drives e.g. whether "validation" is advertised.
    requests_mock.get(API_URL, json=build_capabilities(api_version=api_version, **api_capabilities))
    # dict_no_none: when auto_validate is None, omit the kwarg entirely so the Connection default applies.
    con = Connection(API_URL, **dict_no_none(auto_validate=auto_validate))
    return con

@pytest.fixture(autouse=True)
def dummy_backend_setup(self, dummy_backend):
    """Autouse fixture: queue a validation error on the dummy backend so tests can detect (via logs) whether the validation endpoint was actually hit."""
    dummy_backend.next_validation_errors = [{"code": "NoAdd", "message": "Don't add numbers"}]


# Reusable list of (fixture) parameterization
# of ["api_capabilities", "auto_validate", "validate", "validation_expected"]
_VALIDATION_PARAMETER_SETS = [
# No validation supported by backend: don't attempt to validate
({}, None, None, False),
({}, True, True, False),
# Validation supported by backend, default behavior -> validate
({"validation": True}, None, None, True),
# (Validation supported by backend) no explicit validation enabled: follow auto_validate setting
({"validation": True}, True, None, True),
({"validation": True}, False, None, False),
# (Validation supported by backend) follow explicit `validate` toggle regardless of auto_validate
({"validation": True}, False, True, True),
({"validation": True}, True, False, False),
# TODO: add case with broken validation
]

@pytest.mark.parametrize("validate", [True, False])
def test_create_job_with_pg_validation(
self,
requests_mock,
connection_with_pgvalidation_datacube: Connection,
validate,
):
@pytest.mark.parametrize(
["api_capabilities", "auto_validate", "validate", "validation_expected"],
_VALIDATION_PARAMETER_SETS,
)
def test_cube_download_validation(self, dummy_backend, connection, validate, validation_expected, caplog, tmp_path):
"""The DataCube should pass through request for the validation to the
connection and the validation endpoint should only be called when
validation was requested.
"""
m = requests_mock.post(API_URL + "/validation", json={"errors": []})
cube = connection.load_collection("S2")

requests_mock.post(API_URL + "/jobs", status_code=201, content=self._post_jobs_handler_json)
cube: DataCube = connection_with_pgvalidation_datacube.load_collection("S2")
cube.create_job(validate=validate)
output = tmp_path / "result.tiff"
cube.download(outputfile=output, **dict_no_none(validate=validate))
assert output.read_bytes() == b'{"what?": "Result data"}'
assert dummy_backend.get_sync_pg() == self._PG_S2_SAVE

# Validation should be called if and only if it was requested
expected_call_count = 1 if validate else 0
assert m.call_count == expected_call_count
if validation_expected:
assert dummy_backend.validation_requests == [self._PG_S2_SAVE]
assert caplog.messages == ["Preflight process graph validation raised: [NoAdd] Don't add numbers"]
else:
assert dummy_backend.validation_requests == []
assert caplog.messages == []

@pytest.mark.parametrize("validate", [True, False])
def test_execute_with_pg_validation(
self,
requests_mock,
cube_add: DataCube,
validate,
):
@pytest.mark.parametrize(
["api_capabilities", "auto_validate", "validate", "validation_expected"],
_VALIDATION_PARAMETER_SETS,
)
def test_cube_execute_validation(self, dummy_backend, connection, validate, validation_expected, caplog):
"""The DataCube should pass through request for the validation to the
connection and the validation endpoint should only be called when
validation was requested.
"""
m = requests_mock.post(API_URL + "/validation", json={"errors": []})
requests_mock.post(API_URL + "/jobs", status_code=201, content=self._post_jobs_handler_json)
requests_mock.post(API_URL + "/result", content=self._post_result_handler_json)
cube = connection.load_collection("S2")

cube_add.execute(validate=validate)
res = cube.execute(**dict_no_none(validate=validate))
assert res == {"what?": "Result data"}
assert dummy_backend.get_sync_pg() == self._PG_S2

# Validation should be called if and only if it was requested
expected_call_count = 1 if validate else 0
assert m.call_count == expected_call_count
if validation_expected:
assert dummy_backend.validation_requests == [self._PG_S2]
assert caplog.messages == ["Preflight process graph validation raised: [NoAdd] Don't add numbers"]
else:
assert dummy_backend.validation_requests == []
assert caplog.messages == []

@pytest.mark.parametrize("validate", [True, False])
def test_execute_batch_with_pg_validation(
self,
requests_mock,
cube_add: DataCube,
validate,
@pytest.mark.parametrize(
["api_capabilities", "auto_validate", "validate", "validation_expected"],
_VALIDATION_PARAMETER_SETS,
)
def test_cube_create_job_validation(
self, dummy_backend, connection: Connection, validate, validation_expected, caplog
):
"""The DataCube should pass through request for the validation to the
connection and the validation endpoint should only be called when
validation was requested.
"""
m = requests_mock.post(API_URL + "/validation", json={"errors": []})
requests_mock.post(API_URL + "/jobs", status_code=201, content=self._post_jobs_handler_json)
requests_mock.post(API_URL + f"/jobs/{self.JOB_ID}/results", status_code=202)
job_metadata = {
"id": self.JOB_ID,
"title": f"Job {self.JOB_ID,}",
"description": f"Job {self.JOB_ID,}",
"process": self.PROCESS_GRAPH_DICT,
"status": "finished",
"created": "2017-01-01T09:32:12Z",
"links": [],
}
requests_mock.get(API_URL + f"/jobs/{self.JOB_ID}", status_code=200, json=job_metadata)

cube_add.execute_batch(validate=validate)
cube = connection.load_collection("S2")
job = cube.create_job(**dict_no_none(validate=validate))
assert job.job_id == "job-000"
assert dummy_backend.get_batch_pg() == self._PG_S2_SAVE

if validation_expected:
assert dummy_backend.validation_requests == [self._PG_S2_SAVE]
assert caplog.messages == ["Preflight process graph validation raised: [NoAdd] Don't add numbers"]
else:
assert dummy_backend.validation_requests == []
assert caplog.messages == []

# Validation should be called if and only if it was requested
expected_call_count = 1 if validate else 0
assert m.call_count == expected_call_count
@pytest.mark.parametrize(
    ["api_capabilities", "auto_validate", "validate", "validation_expected"],
    _VALIDATION_PARAMETER_SETS,
)
def test_cube_execute_batch_validation(self, dummy_backend, connection, validate, validation_expected, caplog):
    """The DataCube should pass through request for the validation to the
    connection and the validation endpoint should only be called when
    validation was requested.
    """
    cube = connection.load_collection("S2")
    # dict_no_none: drop `validate` when None, so the connection's default/auto_validate behavior is exercised.
    job = cube.execute_batch(**dict_no_none(validate=validate))
    assert job.job_id == "job-000"
    # execute_batch implicitly appends save_result, hence the _PG_S2_SAVE variant of the process graph.
    assert dummy_backend.get_batch_pg() == self._PG_S2_SAVE

    if validation_expected:
        # Validation was triggered: the dummy backend recorded the request and the error was logged as a preflight warning.
        assert dummy_backend.validation_requests == [self._PG_S2_SAVE]
        assert caplog.messages == ["Preflight process graph validation raised: [NoAdd] Don't add numbers"]
    else:
        # No validation request should have been made, and nothing logged about it.
        assert dummy_backend.validation_requests == []
        assert caplog.messages == []
6 changes: 2 additions & 4 deletions tests/rest/datacube/test_datacube100.py
Original file line number Diff line number Diff line change
Expand Up @@ -1931,9 +1931,8 @@ def test_custom_process_arguments_namespacd(con100: Connection):
assert res.flat_graph() == expected


@pytest.mark.parametrize("support_udp", [True])
@pytest.mark.parametrize("api_capabilities", [{"support_udp": True}])
def test_save_user_defined_process(con100, requests_mock):
requests_mock.get(API_URL + "/", json=build_capabilities(udp=True))
requests_mock.get(API_URL + "/processes", json={"processes": [{"id": "add"}]})

expected_body = load_json_resource("data/1.0.0/save_user_defined_process.json")
Expand All @@ -1955,9 +1954,8 @@ def check_body(request):
assert adapter.called


@pytest.mark.parametrize("support_udp", [True])
@pytest.mark.parametrize("api_capabilities", [{"support_udp": True}])
def test_save_user_defined_process_public(con100, requests_mock):
requests_mock.get(API_URL + "/", json=build_capabilities(udp=True))
requests_mock.get(API_URL + "/processes", json={"processes": [{"id": "add"}]})

expected_body = load_json_resource("data/1.0.0/save_user_defined_process.json")
Expand Down
58 changes: 19 additions & 39 deletions tests/rest/test_connection.py
Original file line number Diff line number Diff line change
Expand Up @@ -3285,21 +3285,25 @@ def connection(self, api_version, requests_mock, api_capabilities, auto_validate
con = Connection(API_URL, **dict_no_none(auto_validate=auto_validate))
return con

# Reusable list of (fixture) parameterization
# of ["api_capabilities", "auto_validate", "validate", "validation_expected"]
_VALIDATION_PARAMETER_SETS = [
# No validation supported by backend: don't attempt to validate
({}, None, None, False),
({}, True, True, False),
# Validation supported by backend, default behavior -> validate
({"validation": True}, None, None, True),
# (Validation supported by backend) no explicit validation enabled: follow auto_validate setting
({"validation": True}, True, None, True),
({"validation": True}, False, None, False),
# (Validation supported by backend) follow explicit `validate` toggle regardless of auto_validate
({"validation": True}, False, True, True),
({"validation": True}, True, False, False),
]

@pytest.mark.parametrize(
["api_capabilities", "auto_validate", "validate", "validation_expected"],
[
# No validation supported by backend: don't attempt to validate
({}, None, None, False),
({}, True, True, False),
# Validation supported by backend, default behavior -> validate
({"validation": True}, None, None, True),
# (Validation supported by backend) no explicit validation enabled: follow auto_validate setting
({"validation": True}, True, None, True),
({"validation": True}, False, None, False),
# (Validation supported by backend) follow explicit `validate` toggle regardless of auto_validate
({"validation": True}, False, True, True),
({"validation": True}, True, False, False),
],
_VALIDATION_PARAMETER_SETS,
)
def test_download_validation(
self,
Expand Down Expand Up @@ -3362,19 +3366,7 @@ def test_download_validation_broken(

@pytest.mark.parametrize(
["api_capabilities", "auto_validate", "validate", "validation_expected"],
[
# No validation supported by backend: don't attempt to validate
({}, None, None, False),
({}, True, True, False),
# Validation supported by backend, default behavior -> validate
({"validation": True}, None, None, True),
# (Validation supported by backend) no explicit validation enabled: follow auto_validate setting
({"validation": True}, True, None, True),
({"validation": True}, False, None, False),
# (Validation supported by backend) follow explicit `validate` toggle regardless of auto_validate
({"validation": True}, False, True, True),
({"validation": True}, True, False, False),
],
_VALIDATION_PARAMETER_SETS,
)
def test_execute_validation(
self, dummy_backend, connection, caplog, api_capabilities, validate, validation_expected
Expand All @@ -3396,19 +3388,7 @@ def test_execute_validation(

@pytest.mark.parametrize(
["api_capabilities", "auto_validate", "validate", "validation_expected"],
[
# No validation supported by backend: don't attempt to validate
({}, None, None, False),
({}, True, True, False),
# Validation supported by backend, default behavior -> validate
({"validation": True}, None, None, True),
# (Validation supported by backend) no explicit validation enabled: follow auto_validate setting
({"validation": True}, True, None, True),
({"validation": True}, False, None, False),
# (Validation supported by backend) follow explicit `validate` toggle regardless of auto_validate
({"validation": True}, False, True, True),
({"validation": True}, True, False, False),
],
_VALIDATION_PARAMETER_SETS,
)
def test_create_job_validation(
self, dummy_backend, connection, caplog, api_capabilities, validate, validation_expected
Expand Down

0 comments on commit dd4804c

Please sign in to comment.