From 2f0aaeb8f83f9435a21c2449a0f375fb06bb1e6c Mon Sep 17 00:00:00 2001 From: Stefaan Lippens Date: Tue, 8 Sep 2020 17:45:55 +0200 Subject: [PATCH 1/9] EP-3521/#149 fix parameter name "x" in "apply" callback --- openeo/rest/datacube.py | 4 +- openeo/rest/processbuilder.py | 4 ++ tests/data/1.0.0/apply_absolute.json | 2 +- tests/rest/datacube/test_datacube100.py | 12 +++++- tests/rest/datacube/test_processbuilder.py | 50 +++++++++++++--------- 5 files changed, 47 insertions(+), 25 deletions(-) diff --git a/openeo/rest/datacube.py b/openeo/rest/datacube.py index e2f626201..622cb565c 100644 --- a/openeo/rest/datacube.py +++ b/openeo/rest/datacube.py @@ -683,7 +683,7 @@ def apply(self, process: Union[str, PGNode]=None, data_argument='x') -> 'DataCub # Simple single string process specification process = PGNode( process_id=process, - arguments={data_argument: {"from_parameter": "data"}} + arguments={data_argument: {"from_parameter": "x"}} ) result_cube = self.process_with_node(PGNode( process_id='apply', @@ -694,7 +694,7 @@ def apply(self, process: Union[str, PGNode]=None, data_argument='x') -> 'DataCub } )) if isinstance(process, typing.Callable): - builder = ProcessBuilder() + builder = ProcessBuilder(parent_data_parameter="x") callback_graph = process(builder) result_cube.processgraph_node.arguments['process'] = {'process_graph': callback_graph.pgnode} diff --git a/openeo/rest/processbuilder.py b/openeo/rest/processbuilder.py index 0fcefc831..b8de6140e 100644 --- a/openeo/rest/processbuilder.py +++ b/openeo/rest/processbuilder.py @@ -89,3 +89,7 @@ def process(self, process_id: str, arguments: dict = None, **kwargs) -> 'Process arguments = {**(arguments or {}), **kwargs} return ProcessBuilder(PGNode(process_id=process_id, arguments=arguments)) + + +def absolute(data: ProcessBuilder) -> ProcessBuilder: + return data.absolute() diff --git a/tests/data/1.0.0/apply_absolute.json b/tests/data/1.0.0/apply_absolute.json index 6dad72e01..7d4955049 100644 --- 
a/tests/data/1.0.0/apply_absolute.json +++ b/tests/data/1.0.0/apply_absolute.json @@ -19,7 +19,7 @@ "process_id": "absolute", "arguments": { "x": { - "from_parameter": "data" + "from_parameter": "x" } }, "result": true diff --git a/tests/rest/datacube/test_datacube100.py b/tests/rest/datacube/test_datacube100.py index b9ebd613e..cb70fdcd6 100644 --- a/tests/rest/datacube/test_datacube100.py +++ b/tests/rest/datacube/test_datacube100.py @@ -249,18 +249,26 @@ def test_metadata_load_collection_100(con100, requests_mock): def test_apply_absolute_pgnode(con100): im = con100.load_collection("S2") - result = im.apply(PGNode(process_id="absolute", arguments={"x": {"from_parameter": "data"}})) + result = im.apply(PGNode(process_id="absolute", arguments={"x": {"from_parameter": "x"}})) expected_graph = load_json_resource('data/1.0.0/apply_absolute.json') assert result.graph == expected_graph -def test_apply_absolute_callback(con100): +def test_apply_absolute_callback_lambda_method(con100): im = con100.load_collection("S2") result = im.apply(lambda data: data.absolute()) expected_graph = load_json_resource('data/1.0.0/apply_absolute.json') assert result.graph == expected_graph +def test_apply_absolute_callback_function(con100): + im = con100.load_collection("S2") + from openeo.rest.processbuilder import absolute + result = im.apply(absolute) + expected_graph = load_json_resource('data/1.0.0/apply_absolute.json') + assert result.graph == expected_graph + + def test_load_collection_properties(con100): # TODO: put this somewhere and expose it to the user? 
def eq(value, case_sensitive=True) -> PGNode: diff --git a/tests/rest/datacube/test_processbuilder.py b/tests/rest/datacube/test_processbuilder.py index 458aeef60..1643c5532 100644 --- a/tests/rest/datacube/test_processbuilder.py +++ b/tests/rest/datacube/test_processbuilder.py @@ -59,29 +59,39 @@ def test_apply_neighborhood_complex_callback(con100): 'result': True} -def test_apply_bandmath(con100): - collection = con100.load_collection("S2") - +def test_apply_dimension_bandmath(con100): from openeo.rest.processbuilder import array_element - bandsum = collection.apply(process=lambda data:array_element(data,index=1) + array_element(data,index=2)) - - actual_graph = bandsum.graph['apply1'] - assert actual_graph == {'arguments': {'data': {'from_node': 'loadcollection1'}, - - 'process': {'process_graph': {'add1': {'arguments': {'x': {'from_node': 'arrayelement1'}, - 'y': {'from_node': 'arrayelement2'}}, - 'process_id': 'add', - 'result': True}, - 'arrayelement1': {'arguments': {'data': {'from_parameter': 'data'}, - 'index': 1}, - 'process_id': 'array_element'}, - 'arrayelement2': {'arguments': {'data': {'from_parameter': 'data'}, - 'index': 2}, - 'process_id': 'array_element'}}}}, - 'process_id': 'apply', - 'result': True} + collection = con100.load_collection("S2") + bandsum = collection.apply_dimension( + process=lambda d: array_element(d, index=1) + array_element(d, index=2), + dimension="bands" + ) + actual_graph = bandsum.graph['applydimension1'] + assert actual_graph == { + 'process_id': 'apply_dimension', + 'arguments': { + 'data': {'from_node': 'loadcollection1'}, + 'dimension': 'bands', + 'process': {'process_graph': { + 'arrayelement1': { + 'process_id': 'array_element', + 'arguments': {'data': {'from_parameter': 'data'}, 'index': 1}, + }, + 'arrayelement2': { + 'process_id': 'array_element', + 'arguments': {'data': {'from_parameter': 'data'}, 'index': 2}, + }, + 'add1': { + 'process_id': 'add', + 'arguments': {'x': {'from_node': 'arrayelement1'}, 'y': 
{'from_node': 'arrayelement2'}}, + 'result': True + }, + }} + }, + 'result': True + } def test_reduce_dimension(con100): From 717496fa0c6b2e07e35d18d45b7bd6ea2596def0 Mon Sep 17 00:00:00 2001 From: Stefaan Lippens Date: Wed, 9 Sep 2020 21:23:08 +0200 Subject: [PATCH 2/9] EP-3555 add (generator for) all official processes for use in callbacks --- openeo/processes/__init__.py | 0 openeo/processes/builder.py | 37 + openeo/processes/generator.py | 132 + openeo/processes/parse.py | 103 + openeo/processes/processes.py | 3575 ++++++++++++++++++++ openeo/rest/datacube.py | 34 +- openeo/rest/processbuilder.py | 95 - tests/processes/__init__.py | 0 tests/processes/test_generator.py | 123 + tests/processes/test_parse.py | 100 + tests/rest/datacube/test_datacube100.py | 19 +- tests/rest/datacube/test_processbuilder.py | 191 +- 12 files changed, 4212 insertions(+), 197 deletions(-) create mode 100644 openeo/processes/__init__.py create mode 100644 openeo/processes/builder.py create mode 100644 openeo/processes/generator.py create mode 100644 openeo/processes/parse.py create mode 100644 openeo/processes/processes.py delete mode 100644 openeo/rest/processbuilder.py create mode 100644 tests/processes/__init__.py create mode 100644 tests/processes/test_generator.py create mode 100644 tests/processes/test_parse.py diff --git a/openeo/processes/__init__.py b/openeo/processes/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/openeo/processes/builder.py b/openeo/processes/builder.py new file mode 100644 index 000000000..5f2b7f4b6 --- /dev/null +++ b/openeo/processes/builder.py @@ -0,0 +1,37 @@ +from typing import Union + +from openeo.internal.graph_building import PGNode + + +UNSET = object() + + +class ProcessBuilderBase: + """ + Base implementation of a builder pattern that allows constructing process graphs + by calling functions. 
+ """ + + def __init__(self, pgnode: Union[PGNode, dict]): + self.pgnode = pgnode + + @classmethod + def from_parameter(cls, parameter: str): + return cls({"from_parameter": parameter}) + + @classmethod + def process(cls, process_id: str, arguments: dict = None, **kwargs) -> 'ProcessBuilderBase': + """ + Apply process, using given arguments + + :param process_id: process id of the process. + :param arguments: argument dictionary for the process. + :return: new ProcessBuilder instance + """ + arguments = {**(arguments or {}), **kwargs} + for arg, value in arguments.items(): + if isinstance(value, ProcessBuilderBase): + arguments[arg] = value.pgnode + for arg in [a for a, v in arguments.items() if v is UNSET]: + del arguments[arg] + return cls(PGNode(process_id=process_id, arguments=arguments)) diff --git a/openeo/processes/generator.py b/openeo/processes/generator.py new file mode 100644 index 000000000..6b21fb939 --- /dev/null +++ b/openeo/processes/generator.py @@ -0,0 +1,132 @@ +import argparse +import sys +import textwrap +from pathlib import Path +from typing import Union, List, Iterator + +from openeo.processes.parse import Process, parse_all_from_dir + + +class PythonRenderer: + """Generator of Python function source code for a given openEO process""" + DEFAULT_WIDTH = 115 + + def __init__( + self, oo_mode=False, indent=" ", body_template="return process({id!r}, {args})", optional_default="None" + ): + self.oo_mode = oo_mode + self.indent = indent + self.body_template = body_template + self.optional_default = optional_default + + def render_process(self, process: Process, prefix: str = "", width: int = DEFAULT_WIDTH) -> str: + + # TODO: add type hints + # TODO: width limit? 
+ def_line = "def {id}({args}):".format( + id=self._safe_name(process.id), + args=", ".join(self._def_arguments(process)) + ) + + call_args = ", ".join( + ["{p}={a}".format(p=p, a=a) for (p, a) in zip(self._par_names(process), self._arg_names(process))] + ) + body = self.indent + self.body_template.format(id=self._safe_name(process.id), args=call_args) + + return textwrap.indent("\n".join([ + def_line, + self.render_docstring(process, width=width - len(prefix), prefix=self.indent), + body + ]), prefix=prefix) + + def _safe_name(self, name: str) -> str: + if name in {'and', 'or', 'if', 'not'}: + name += '_' + return name + + def _par_names(self, process: Process) -> List[str]: + """Names of the openEO process parameters""" + return [self._safe_name(p.name) for p in process.parameters] + + def _arg_names(self, process: Process) -> List[str]: + """Names of the arguments in the python function""" + arg_names = self._par_names(process) + if self.oo_mode: + arg_names = [n if i > 0 else "self" for i, n in enumerate(arg_names)] or ["self"] + return arg_names + + def _def_arguments(self, process: Process) -> Iterator[str]: + # TODO: add argument type hints? + for arg, param in zip(self._arg_names(process), process.parameters): + if param.optional: + yield "{a}={d}".format(a=arg, d=self.optional_default) + elif param.has_default(): + yield "{a}={d!r}".format(a=arg, d=param.default) + else: + yield arg + if self.oo_mode and len(process.parameters) == 0: + yield "self" + + def render_docstring(self, process: Process, prefix="", width: int = DEFAULT_WIDTH) -> str: + w = width - len(prefix) + # TODO: use description instead of summary? 
+ doc = "\n\n".join(textwrap.fill(d, width=w) for d in process.summary.split("\n\n")) + params = "\n".join( + self._hanging_indent(":param {n}: {d}".format(n=arg, d=param.description), width=w) + for arg, param in zip(self._arg_names(process), process.parameters) + ) + returns = self._hanging_indent(":return: {d}".format(d=process.returns.description), width=w) + return textwrap.indent('"""\n' + doc + "\n\n" + (params + "\n\n" + returns).strip() + '\n"""', prefix=prefix) + + def _hanging_indent(self, paragraph: str, indent=" ", width: int = DEFAULT_WIDTH) -> str: + return textwrap.indent(textwrap.fill(paragraph, width=width - len(indent)), prefix=indent).lstrip() + + +def generate_process_py(processes_dir: Union[Path, str], output=sys.stdout): + processes = list(parse_all_from_dir(processes_dir)) + + oo_src = textwrap.dedent(""" + from openeo.processes.builder import ProcessBuilderBase, UNSET + + + class ProcessBuilder(ProcessBuilderBase): + + def __add__(self, other): + return self.add(other) + + + """) + fun_src = textwrap.dedent(""" + # Shortcut + process = ProcessBuilder.process + + + """) + fun_renderer = PythonRenderer(body_template="return process({id!r}, {args})", optional_default="UNSET") + oo_renderer = PythonRenderer(oo_mode=True, body_template="return {id}({args})", optional_default="UNSET") + for p in processes: + fun_src += fun_renderer.render_process(p) + "\n\n\n" + oo_src += oo_renderer.render_process(p, prefix=" ") + "\n\n" + output.write(textwrap.dedent(""" + # This file is automatically generated. + # Do not edit directly. 
+ """)) + output.write(oo_src) + output.write(fun_src) + + +def main(): + # Usage example (from project root, assuming the `openeo-process` repo is checked out as well): + # python openeo/processes//generator.py ../openeo-processes --output openeo/processes/processes.py + arg_parser = argparse.ArgumentParser() + arg_parser.add_argument("dir", help="""Directory that holds openEO process definitions in JSON format""") + arg_parser.add_argument("--output", help="Path to output 'processes.py' file") + + arguments = arg_parser.parse_args() + + with (open(arguments.output, "w", encoding="utf-8") if arguments.output else sys.stdout) as f: + generate_process_py(arguments.dir, output=f) + + +if __name__ == '__main__': + main() diff --git a/openeo/processes/parse.py b/openeo/processes/parse.py new file mode 100644 index 000000000..6870e3c24 --- /dev/null +++ b/openeo/processes/parse.py @@ -0,0 +1,103 @@ +""" +Functionality and tools to process openEO processes. +For example: parse a bunch of JSON descriptions and generate Python (stub) functions. 
+""" +import json +from pathlib import Path +from typing import List, Union, Iterator + +import requests + + +class Schema: + """Schema description of an openEO process parameter or return value.""" + + def __init__(self, schema: Union[dict, list]): + self.schema = schema + + @classmethod + def from_dict(cls, data: dict) -> 'Schema': + return cls(schema=data) + + +class Parameter: + """openEO process parameter""" + + NO_DEFAULT = object() + + def __init__(self, name: str, description: str, schema: Schema, default=NO_DEFAULT, optional: bool = False): + self.name = name + self.description = description + self.schema = schema + self.default = default + self.optional = optional + + @classmethod + def from_dict(cls, data: dict) -> 'Parameter': + return cls( + name=data["name"], description=data["description"], schema=Schema.from_dict(data["schema"]), + default=data.get("default", cls.NO_DEFAULT), optional=data.get("optional", False) + ) + + def has_default(self): + return self.default is not self.NO_DEFAULT + + +class Returns: + """openEO process return description.""" + + def __init__(self, description: str, schema: Schema): + self.description = description + self.schema = schema + + @classmethod + def from_dict(cls, data: dict) -> 'Returns': + return cls(description=data["description"], schema=Schema.from_dict(data["schema"])) + + +class Process: + """An openEO process""" + + def __init__( + self, id: str, parameters: List[Parameter], returns: Returns, + description: str = "", summary: str = "" + ): + self.id = id + self.description = description + self.parameters = parameters + self.returns = returns + self.summary = summary + # TODO: more properties? 
+ + @classmethod + def from_dict(cls, data: dict) -> 'Process': + """Construct openEO process from dictionary values""" + return cls( + id=data["id"], + parameters=[Parameter.from_dict(d) for d in data["parameters"]], + returns=Returns.from_dict(data["returns"]), + description=data["description"], + summary=data["summary"], + ) + + @classmethod + def from_json(cls, data: str) -> 'Process': + """Parse openEO process JSON description.""" + return cls.from_dict(json.loads(data)) + + @classmethod + def from_json_url(cls, url: str) -> 'Process': + """Parse openEO process JSON description from given URL.""" + return cls.from_dict(requests.get(url).json()) + + @classmethod + def from_json_file(cls, path: Union[str, Path]) -> 'Process': + """Parse openEO process JSON description file.""" + with Path(path).open("r") as f: + return cls.from_json(f.read()) + + +def parse_all_from_dir(path: Union[str, Path], pattern="*.json") -> Iterator[Process]: + """Parse all openEO process files in given directory""" + for p in sorted(Path(path).glob(pattern)): + yield Process.from_json_file(p) diff --git a/openeo/processes/processes.py b/openeo/processes/processes.py new file mode 100644 index 000000000..8eb192d01 --- /dev/null +++ b/openeo/processes/processes.py @@ -0,0 +1,3575 @@ + +# This file is automatically generated. +# Do not edit directly. + +from openeo.processes.builder import ProcessBuilderBase, UNSET + + +class ProcessBuilder(ProcessBuilderBase): + + def __add__(self, other): + return self.add(other) + + + def absolute(self): + """ + Absolute value + + :param self: A number. + + :return: The computed absolute value. + """ + return absolute(x=self) + + def add(self, y): + """ + Addition of two numbers + + :param self: The first summand. + :param y: The second summand. + + :return: The computed sum of the two numbers. 
+ """ + return add(x=self, y=y) + + def add_dimension(self, name, label, type=UNSET): + """ + Add a new dimension + + :param self: A data cube to add the dimension to. + :param name: Name for the dimension. + :param label: A dimension label. + :param type: The type of dimension, defaults to `other`. + + :return: The data cube with a newly added dimension. The new dimension has exactly one dimension label. + All other dimensions remain unchanged. + """ + return add_dimension(data=self, name=name, label=label, type=type) + + def aggregate_spatial(self, geometries, reducer, target_dimension=UNSET, context=UNSET): + """ + Zonal statistics for geometries + + :param self: A raster data cube. The data cube implicitly gets restricted to the bounds of the + geometries as if ``filter_spatial()`` would have been used with the same values for the corresponding + parameters immediately before this process. + :param geometries: Geometries as GeoJSON on which the aggregation will be based. + :param reducer: A reducer to be applied on all values of each geometry. A reducer is a single process + such as ``mean()`` or a set of processes, which computes a single value for a list of values, see the + category 'reducer' for such processes. + :param target_dimension: The new dimension name to be used for storing the results. Defaults to + `result`. + :param context: Additional data to be passed to the reducer. + + :return: A vector data cube with the computed results and restricted to the bounds of the geometries. + The computed value is stored in dimension with the name that was specified in the parameter + `target_dimension`. The computation also stores information about the total count of pixels (valid + + invalid pixels) and the number of valid pixels (see ``is_valid()``) for each geometry. These values are + stored as new dimension with a dimension name derived from `target_dimension` by adding the suffix + `_meta`. 
The new dimension has the dimension labels `total_count` and `valid_count`. + """ + return aggregate_spatial(data=self, geometries=geometries, reducer=reducer, target_dimension=target_dimension, context=context) + + def aggregate_spatial_binary(self, geometries, reducer, target_dimension=UNSET, context=UNSET): + """ + Zonal statistics for geometries by binary aggregation + + :param self: A raster data cube. The data cube implicitly gets restricted to the bounds of the + geometries as if ``filter_spatial()`` would have been used with the same values for the corresponding + parameters immediately before this process. + :param geometries: Geometries as GeoJSON on which the aggregation will be based. + :param reducer: A reduction operator to be applied consecutively on tuples of values. It must be both + associative and commutative as the execution may be executed in parallel and therefore the order of + execution is arbitrary. The reduction operator may be a single process such as ``multiply()`` or + consist of multiple sub-processes. + :param target_dimension: The new dimension name to be used for storing the results. Defaults to + `result`. + :param context: Additional data to be passed to the reducer. + + :return: A vector data cube with the computed results and restricted to the bounds of the geometries. + The computed value is stored in dimension with the name that was specified in the parameter + `target_dimension`. The computation also stores information about the total count of pixels (valid + + invalid pixels) and the number of valid pixels (see ``is_valid()``) for each geometry. These values are + stored as new dimension with a dimension name derived from `target_dimension` by adding the suffix + `_meta`. The new dimension has the dimension labels `total_count` and `valid_count`. 
+ """ + return aggregate_spatial_binary(data=self, geometries=geometries, reducer=reducer, target_dimension=target_dimension, context=context) + + def aggregate_temporal(self, intervals, reducer, labels=UNSET, dimension=UNSET, context=UNSET): + """ + Temporal aggregations + + :param self: A data cube. + :param intervals: Left-closed temporal intervals, which are allowed to overlap. Each temporal interval + in the array has exactly two elements: 1. The first element is the start of the temporal interval. The + specified instance in time is **included** in the interval. 2. The second element is the end of the + temporal interval. The specified instance in time is **excluded** from the interval. The specified + temporal strings follow [RFC 3339](https://tools.ietf.org/html/rfc3339). Although [RFC 3339 prohibits + the hour to be '24'](https://tools.ietf.org/html/rfc3339#section-5.7), **this process allows the value + '24' for the hour** of an end time in order to make it possible that left-closed time intervals can + fully cover the day. + :param reducer: A reducer to be applied on all values along the specified dimension. A reducer is a + single process such as ``mean()`` or a set of processes, which computes a single value for a list of + values, see the category 'reducer' for such processes. + :param labels: Distinct labels for the intervals, which can contain dates and/or times. Is only + required to be specified if the values for the start of the temporal intervals are not distinct and + thus the default labels would not be unique. The number of labels and the number of groups need to be + equal. + :param dimension: The name of the temporal dimension for aggregation. All data along the dimension is + passed through the specified reducer. If the dimension is not set or set to `null`, the data cube is + expected to only have one temporal dimension. Fails with a `TooManyDimensions` error if it has more + dimensions. 
Fails with a `DimensionNotAvailable` error if the specified dimension does not exist. + :param context: Additional data to be passed to the reducer. + + :return: A data cube with the same dimensions. The dimension properties (name, type, labels, reference + system and resolution) remain unchanged, except for the resolution and dimension labels of the given + temporal dimension. + """ + return aggregate_temporal(data=self, intervals=intervals, reducer=reducer, labels=labels, dimension=dimension, context=context) + + def aggregate_temporal_period(self, period, reducer, dimension=UNSET, context=UNSET): + """ + Temporal aggregations based on calendar hierarchies + + :param self: A data cube. + :param period: The time intervals to aggregate. The following pre-defined values are available: * + `hour`: Hour of the day * `day`: Day of the year * `week`: Week of the year * `dekad`: Ten day periods, + counted per year with three periods per month (day 1 - 10, 11 - 20 and 21 - end of month). The third + dekad of the month can range from 8 to 11 days. For example, the fourth dekad is Feb, 1 - Feb, 10 each + year. * `month`: Month of the year * `season`: Three month periods of the calendar seasons (December - + February, March - May, June - August, September - November). * `tropical-season`: Six month periods of + the tropical seasons (November - April, May - October). * `year`: Proleptic years * `decade`: Ten year + periods ([0-to-9 decade](https://en.wikipedia.org/wiki/Decade#0-to-9_decade)), from a year ending in a + 0 to the next year ending in a 9. * `decade-ad`: Ten year periods ([1-to-0 + decade](https://en.wikipedia.org/wiki/Decade#1-to-0_decade)) better aligned with the Anno Domini (AD) + calendar era, from a year ending in a 1 to the next year ending in a 0. + :param reducer: A reducer to be applied on all values along the specified dimension. 
A reducer is a + single process such as ``mean()`` or a set of processes, which computes a single value for a list of + values, see the category 'reducer' for such processes. + :param dimension: The name of the temporal dimension for aggregation. All data along the dimension is + passed through the specified reducer. If the dimension is not set or set to `null`, the data cube is + expected to only have one temporal dimension. Fails with a `TooManyDimensions` error if it has more + dimensions. Fails with a `DimensionNotAvailable` error if the specified dimension does not exist. + :param context: Additional data to be passed to the reducer. + + :return: A data cube with the same dimensions. The dimension properties (name, type, labels, reference + system and resolution) remain unchanged, except for the resolution and dimension labels of the given + temporal dimension. The specified temporal dimension has the following dimension labels (`YYYY` = four- + digit year, `MM` = two-digit month, `DD` two-digit day of month): * `hour`: `YYYY-MM-DD-00` - `YYYY- + MM-DD-23` * `day`: `YYYY-001` - `YYYY-365` * `week`: `YYYY-01` - `YYYY-52` * `dekad`: `YYYY-00` - + `YYYY-36` * `month`: `YYYY-01` - `YYYY-12` * `season`: `YYYY-djf` (December - February), `YYYY-mam` + (March - May), `YYYY-jja` (June - August), `YYYY-son` (September - November). * `tropical-season`: + `YYYY-ndjfma` (November - April), `YYYY-mjjaso` (May - October). * `year`: `YYYY` * `decade`: `YYY0` * + `decade-ad`: `YYY1` + """ + return aggregate_temporal_period(data=self, period=period, reducer=reducer, dimension=dimension, context=context) + + def all(self, ignore_nodata=UNSET): + """ + Are all of the values true? + + :param self: A set of boolean values. + :param ignore_nodata: Indicates whether no-data values are ignored or not and ignores them by default. + + :return: Boolean result of the logical operation. 
+ """ + return all(data=self, ignore_nodata=ignore_nodata) + + def and_(self, y): + """ + Logical AND + + :param self: A boolean value. + :param y: A boolean value. + + :return: Boolean result of the logical AND. + """ + return and_(x=self, y=y) + + def anomaly(self, normals, period): + """ + Computes anomalies + + :param self: A data cube with exactly one temporal dimension and the following dimension labels for the + given period (`YYYY` = four-digit year, `MM` = two-digit month, `DD` two-digit day of month): * + `hour`: `YYYY-MM-DD-00` - `YYYY-MM-DD-23` * `day`: `YYYY-001` - `YYYY-365` * `week`: `YYYY-01` - + `YYYY-52` * `dekad`: `YYYY-00` - `YYYY-36` * `month`: `YYYY-01` - `YYYY-12` * `season`: `YYYY-djf` + (December - February), `YYYY-mam` (March - May), `YYYY-jja` (June - August), `YYYY-son` (September - + November). * `tropical-season`: `YYYY-ndjfma` (November - April), `YYYY-mjjaso` (May - October). * + `year`: `YYYY` * `decade`: `YYY0` * `decade-ad`: `YYY1` * `single-period` / `climatology-period`: Any + ``aggregate_temporal_period()`` can compute such a data cube. + :param normals: A data cube with normals, e.g. daily, monthly or yearly values computed from a process + such as ``climatological_normal()``. Must contain exactly one temporal dimension with the following + dimension labels for the given period: * `hour`: `00` - `23` * `day`: `001` - `365` * `week`: `01` - + `52` * `dekad`: `00` - `36` * `month`: `01` - `12` * `season`: `djf` (December - February), `mam` + (March - May), `jja` (June - August), `son` (September - November) * `tropical-season`: `ndjfma` + (November - April), `mjjaso` (May - October) * `year`: Four-digit year numbers * `decade`: Four-digit + year numbers, the last digit being a `0` * `decade-ad`: Four-digit year numbers, the last digit being a + `1` * `single-period` / `climatology-period`: A single dimension label with any name is expected. + :param period: Specifies the time intervals available in the normals data cube. 
The following options + are available: * `hour`: Hour of the day * `day`: Day of the year * `week`: Week of the year * + `dekad`: Ten day periods, counted per year with three periods per month (day 1 - 10, 11 - 20 and 21 - + end of month). The third dekad of the month can range from 8 to 11 days. For example, the fourth dekad + is Feb, 1 - Feb, 10 each year. * `month`: Month of the year * `season`: Three month periods of the + calendar seasons (December - February, March - May, June - August, September - November). * `tropical- + season`: Six month periods of the tropical seasons (November - April, May - October). * `year`: + Proleptic years * `decade`: Ten year periods ([0-to-9 + decade](https://en.wikipedia.org/wiki/Decade#0-to-9_decade)), from a year ending in a 0 to the next + year ending in a 9. * `decade-ad`: Ten year periods ([1-to-0 + decade](https://en.wikipedia.org/wiki/Decade#1-to-0_decade)) better aligned with the Anno Domini (AD) + calendar era, from a year ending in a 1 to the next year ending in a 0. * `single-period` / + `climatology-period`: A single period of arbitrary length + + :return: A data cube with the same dimensions. The dimension properties (name, type, labels, reference + system and resolution) remain unchanged. + """ + return anomaly(data=self, normals=normals, period=period) + + def any(self, ignore_nodata=UNSET): + """ + Is at least one value true? + + :param self: A set of boolean values. + :param ignore_nodata: Indicates whether no-data values are ignored or not and ignores them by default. + + :return: Boolean result of the logical operation. + """ + return any(data=self, ignore_nodata=ignore_nodata) + + def apply(self, process, context=UNSET): + """ + Apply a process to each pixel + + :param self: A data cube. + :param process: A unary process to be applied on each value, may consist of multiple sub-processes. + :param context: Additional data to be passed to the process. 
+ + :return: A data cube with the newly computed values and the same dimensions. The dimension properties + (name, type, labels, reference system and resolution) remain unchanged. + """ + return apply(data=self, process=process, context=context) + + def apply_dimension(self, process, dimension, target_dimension=UNSET, context=UNSET): + """ + Apply a process to pixels along a dimension + + :param self: A data cube. + :param process: Process to be applied on all pixel values. The specified process needs to accept an + array as parameter and must return an array with least one element. A process may consist of multiple + sub-processes. + :param dimension: The name of the source dimension to apply the process on. Fails with a + `DimensionNotAvailable` error if the specified dimension does not exist. + :param target_dimension: The name of the target dimension or `null` (the default) to use the source + dimension specified in the parameter `dimension`. By specifying a target dimension, the source + dimension is removed. The target dimension with the specified name and the type `other` (see + ``add_dimension()``) is created, if if doesn't exist yet. + :param context: Additional data to be passed to the process. + + :return: A data cube with the newly computed values. All dimensions stay the same, except for the + dimensions specified in corresponding parameters. There are three cases how the data cube changes: 1. + The source dimension **is** the target dimension: * The (number of) dimensions remain unchanged. + * The source dimension properties name, type and reference system remain unchanged. * The dimension + labels and the resolution are preserved when the number of pixel values in the source dimension is + equal to the number of values computed by the process. The other case is described below. 2. The source + dimension **is not** the target dimension and the latter **exists**: * The number of dimensions + decreases by one as the source dimension is dropped. 
* The target dimension properties name, type + and reference system remain unchanged. * The resolution changes, the number of dimension labels is + equal to the number of values computed by the process and the dimension labels are incrementing + integers starting from zero 3. The source dimension **is not** the target dimension and the latter + **does not exist**: * The number of dimensions remain unchanged, but the source dimension is + replaced with the target dimension. * The target dimension has the specified name and the type + other. The reference system is not changed. * The resolution changes, the number of dimension labels + is equal to the number of values computed by the process and the dimension labels are incrementing + integers starting from zero For all three cases except for the exception in the first case, the + resolution changes, the number of dimension labels is equal to the number of values computed by the + process and the dimension labels are incrementing integers starting from zero. + """ + return apply_dimension(data=self, process=process, dimension=dimension, target_dimension=target_dimension, context=context) + + def apply_kernel(self, kernel, factor=UNSET, border=UNSET, replace_invalid=UNSET): + """ + Apply a spatial convolution with a kernel + + :param self: A data cube. + :param kernel: Kernel as a two-dimensional array of weights. The inner level of the nested array aligns + with the `x` axis and the outer level aligns with the `y` axis. Each level of the kernel must have an + uneven number of elements, otherwise the process throws a `KernelDimensionsUneven` error. + :param factor: A factor that is multiplied to each value after the kernel has been applied. This is + basically a shortcut for explicitly multiplying each value by a factor afterwards, which is often + required for some kernel-based algorithms such as the Gaussian blur. + :param border: Determines how the data is extended when the kernel overlaps with the borders. 
Defaults + to fill the border with zeroes. The following options are available: * *numeric value* - fill with a + user-defined constant number `n`: `nnnnnn|abcdefgh|nnnnnn` (default, with `n` = 0) * `replicate` - + repeat the value from the pixel at the border: `aaaaaa|abcdefgh|hhhhhh` * `reflect` - mirror/reflect + from the border: `fedcba|abcdefgh|hgfedc` * `reflect_pixel` - mirror/reflect from the center of the + pixel at the border: `gfedcb|abcdefgh|gfedcb` * `wrap` - repeat/wrap the image: + `cdefgh|abcdefgh|abcdef` + :param replace_invalid: This parameter specifies the value to replace non-numerical or infinite + numerical values with. By default, those values are replaced with zeroes. + + :return: A data cube with the newly computed values and the same dimensions. The dimension properties + (name, type, labels, reference system and resolution) remain unchanged. + """ + return apply_kernel(data=self, kernel=kernel, factor=factor, border=border, replace_invalid=replace_invalid) + + def apply_neighborhood(self, process, size, overlap=UNSET, context=UNSET): + """ + Apply a process to pixels in a n-dimensional neighbourhood + + :param self: A data cube. + :param process: Process to be applied on all neighbourhoods. + :param size: Neighbourhood sizes along each dimension. This object maps dimension names to either a + physical measure (e.g. 100 m, 10 days) or pixels (e.g. 32 pixels). For dimensions not specified, the + default is to provide all values. Be aware that including all values from overly large dimensions may + not be processed at once. + :param overlap: Overlap of neighbourhoods along each dimension to avoid border effects. For instance a + temporal dimension can add 1 month before and after a neighbourhood. In the spatial dimensions, this is + often a number of pixels. The overlap specified is added before and after, so an overlap of 8 pixels + will add 8 pixels on both sides of the window, so 16 in total. 
Be aware that large overlaps increase + the need for computational resources and modifying overlapping data in subsequent operations have no + effect. + :param context: Additional data to be passed to the process. + + :return: A data cube with the newly computed values and the same dimensions. The dimension properties + (name, type, labels, reference system and resolution) remain unchanged. + """ + return apply_neighborhood(data=self, process=process, size=size, overlap=overlap, context=context) + + def arccos(self): + """ + Inverse cosine + + :param self: A number. + + :return: The computed angle in radians. + """ + return arccos(x=self) + + def arcosh(self): + """ + Inverse hyperbolic cosine + + :param self: A number. + + :return: The computed angle in radians. + """ + return arcosh(x=self) + + def arcsin(self): + """ + Inverse sine + + :param self: A number. + + :return: The computed angle in radians. + """ + return arcsin(x=self) + + def arctan(self): + """ + Inverse tangent + + :param self: A number. + + :return: The computed angle in radians. + """ + return arctan(x=self) + + def arctan2(self, x): + """ + Inverse tangent of two numbers + + :param self: A number to be used as dividend. + :param x: A number to be used as divisor. + + :return: The computed angle in radians. + """ + return arctan2(y=self, x=x) + + def array_apply(self, process, context=UNSET): + """ + Applies a unary process to each array element + + :param self: An array. + :param process: A process to be applied on each value, may consist of multiple sub-processes. The + specified process must be unary meaning that it must work on a single value. + :param context: Additional data to be passed to the process. + + :return: An array with the newly computed values. The number of elements are the same as for the + original array. 
+ """ + return array_apply(data=self, process=process, context=context) + + def array_contains(self, value): + """ + Check whether the array contains a given value + + :param self: List to find the value in. + :param value: Value to find in `data`. + + :return: Returns `true` if the list contains the value, `false` otherwise. + """ + return array_contains(data=self, value=value) + + def array_element(self, index=UNSET, label=UNSET, return_nodata=UNSET): + """ + Get an element from an array + + :param self: An array. + :param index: The zero-based index of the element to retrieve. + :param label: The label of the element to retrieve. + :param return_nodata: By default this process throws an `ArrayElementNotAvailable` exception if the + index or label is invalid. If you want to return `null` instead, set this flag to `true`. + + :return: The value of the requested element. + """ + return array_element(data=self, index=index, label=label, return_nodata=return_nodata) + + def array_filter(self, condition, context=UNSET): + """ + Filter an array based on a condition + + :param self: An array. + :param condition: A condition that is evaluated against each value in the array. Only the array + elements where the condition returns `true` are preserved. + :param context: Additional data to be passed to the condition. + + :return: An array filtered by the specified condition. The number of elements are less than or equal + compared to the original array. + """ + return array_filter(data=self, condition=condition, context=context) + + def array_find(self, value): + """ + Get the index for a value in an array + + :param self: List to find the value in. + :param value: Value to find in `data`. + + :return: Returns the index of the first element with the specified value. If no element was found, + `null` is returned. + """ + return array_find(data=self, value=value) + + def array_labels(self): + """ + Get the labels for an array + + :param self: An array with labels. 
+ + :return: The labels as array. + """ + return array_labels(data=self) + + def arsinh(self): + """ + Inverse hyperbolic sine + + :param self: A number. + + :return: The computed angle in radians. + """ + return arsinh(x=self) + + def artanh(self): + """ + Inverse hyperbolic tangent + + :param self: A number. + + :return: The computed angle in radians. + """ + return artanh(x=self) + + def between(self, min, max, exclude_max=UNSET): + """ + Between comparison + + :param self: The value to check. + :param min: Lower boundary (inclusive) to check against. + :param max: Upper boundary (inclusive) to check against. + :param exclude_max: Exclude the upper boundary `max` if set to `true`. Defaults to `false`. + + :return: `true` if `x` is between the specified bounds, otherwise `false`. + """ + return between(x=self, min=min, max=max, exclude_max=exclude_max) + + def ceil(self): + """ + Round fractions up + + :param self: A number to round up. + + :return: The number rounded up. + """ + return ceil(x=self) + + def climatological_normal(self, period, climatology_period=UNSET): + """ + Computes climatology normals + + :param self: A data cube with exactly one temporal dimension. The data cube must span at least the + temporal interval specified in the parameter `climatology-period`. Seasonal periods may span two + consecutive years, e.g. temporal winter that includes months December, January and February. If the + required months before the actual climate period are available, the season is taken into account. If + not available, the first season is not taken into account and the seasonal mean is based on one year + less than the other seasonal normals. The incomplete season at the end of the last year is never taken + into account. + :param period: The time intervals to aggregate the average value for. 
The following pre-defined + frequencies are supported: * `day`: Day of the year * `month`: Month of the year * `climatology- + period`: The period specified in the `climatology-period`. * `season`: Three month periods of the + calendar seasons (December - February, March - May, June - August, September - November). * `tropical- + season`: Six month periods of the tropical seasons (November - April, May - October). + :param climatology_period: The climatology period as closed temporal interval. The first element of the + array is the first year to be fully included in the temporal interval. The second element is the last + year to be fully included in the temporal interval. The default period is from 1981 until 2010 (both + inclusive). + + :return: A data cube with the same dimensions. The dimension properties (name, type, labels, reference + system and resolution) remain unchanged, except for the resolution and dimension labels of the temporal + dimension. The temporal dimension has the following dimension labels: * `day`: `001` - `365` * + `month`: `01` - `12` * `climatology-period`: `climatology-period` * `season`: `djf` (December - + February), `mam` (March - May), `jja` (June - August), `son` (September - November) * `tropical- + season`: `ndjfma` (November - April), `mjjaso` (May - October) + """ + return climatological_normal(data=self, period=period, climatology_period=climatology_period) + + def clip(self, min, max): + """ + Clip a value between a minimum and a maximum + + :param self: A number. + :param min: Minimum value. If the value is lower than this value, the process will return the value of + this parameter. + :param max: Maximum value. If the value is greater than this value, the process will return the value + of this parameter. + + :return: The value clipped to the specified range. + """ + return clip(x=self, min=min, max=max) + + def constant(self): + """ + Define a constant value + + :param self: The value of the constant. 
+ + :return: The value of the constant. + """ + return constant(x=self) + + def cos(self): + """ + Cosine + + :param self: An angle in radians. + + :return: The computed cosine of `x`. + """ + return cos(x=self) + + def cosh(self): + """ + Hyperbolic cosine + + :param self: An angle in radians. + + :return: The computed hyperbolic cosine of `x`. + """ + return cosh(x=self) + + def count(self, condition=UNSET, context=UNSET): + """ + Count the number of elements + + :param self: An array with elements of any data type. + :param condition: A condition consists of one or more processes, which in the end return a boolean + value. It is evaluated against each element in the array. An element is counted only if the condition + returns `true`. Defaults to count valid elements in a list (see ``is_valid()``). Setting this parameter + to boolean `true` counts all elements in the list. + :param context: Additional data to be passed to the condition. + + :return: The counted number of elements. + """ + return count(data=self, condition=condition, context=context) + + def create_raster_cube(self): + """ + Create an empty raster data cube + + :return: An empty raster data cube with zero dimensions. + """ + return create_raster_cube() + + def cummax(self, ignore_nodata=UNSET): + """ + Cumulative maxima + + :param self: An array of numbers. + :param ignore_nodata: Indicates whether no-data values are ignored or not. Ignores them by default. + Setting this flag to `false` considers no-data values so that `null` is set for all the following + elements. + + :return: An array with the computed cumulative maxima. + """ + return cummax(data=self, ignore_nodata=ignore_nodata) + + def cummin(self, ignore_nodata=UNSET): + """ + Cumulative minima + + :param self: An array of numbers. + :param ignore_nodata: Indicates whether no-data values are ignored or not. Ignores them by default. 
+ Setting this flag to `false` considers no-data values so that `null` is set for all the following + elements. + + :return: An array with the computed cumulative minima. + """ + return cummin(data=self, ignore_nodata=ignore_nodata) + + def cumproduct(self, ignore_nodata=UNSET): + """ + Cumulative products + + :param self: An array of numbers. + :param ignore_nodata: Indicates whether no-data values are ignored or not. Ignores them by default. + Setting this flag to `false` considers no-data values so that `null` is set for all the following + elements. + + :return: An array with the computed cumulative products. + """ + return cumproduct(data=self, ignore_nodata=ignore_nodata) + + def cumsum(self, ignore_nodata=UNSET): + """ + Cumulative sums + + :param self: An array of numbers. + :param ignore_nodata: Indicates whether no-data values are ignored or not. Ignores them by default. + Setting this flag to `false` considers no-data values so that `null` is set for all the following + elements. + + :return: An array with the computed cumulative sums. + """ + return cumsum(data=self, ignore_nodata=ignore_nodata) + + def debug(self, code=UNSET, level=UNSET, message=UNSET): + """ + Publish debugging information + + :param self: Data to publish. + :param code: An identifier to help identify the log entry in a bunch of other log entries. + :param level: The severity level of this message, defaults to `info`. Note that the level `error` + forces the computation to be stopped! + :param message: A message to send in addition to the data. + + :return: Returns the data as passed to the `data` parameter. + """ + return debug(data=self, code=code, level=level, message=message) + + def dimension_labels(self, dimension): + """ + Get the dimension labels + + :param self: The data cube. + :param dimension: The name of the dimension to get the labels for. + + :return: The labels as array. 
+ """ + return dimension_labels(data=self, dimension=dimension) + + def divide(self, y): + """ + Division of two numbers + + :param self: The dividend. + :param y: The divisor. + + :return: The computed result. + """ + return divide(x=self, y=y) + + def drop_dimension(self, name): + """ + Remove a dimension + + :param self: The data cube to drop a dimension from. + :param name: Name of the dimension to drop. + + :return: A data cube without the specified dimension. The number of dimensions decreases by one, but + the dimension properties (name, type, labels, reference system and resolution) for all other dimensions + remain unchanged. + """ + return drop_dimension(data=self, name=name) + + def e(self): + """ + Euler's number (e) + + :return: The numerical value of Euler's number. + """ + return e() + + def eq(self, y, delta=UNSET, case_sensitive=UNSET): + """ + Equal to comparison + + :param self: First operand. + :param y: Second operand. + :param delta: Only applicable for comparing two numbers. If this optional parameter is set to a + positive non-zero number the equality of two numbers is checked against a delta value. This is + especially useful to circumvent problems with floating point inaccuracy in machine-based computation. + This option is basically an alias for the following computation: `lte(abs(minus([x, y]), delta)` + :param case_sensitive: Only applicable for comparing two strings. Case sensitive comparison can be + disabled by setting this parameter to `false`. + + :return: Returns `true` if `x` is equal to `y`, `null` if any operand is `null`, otherwise `false`. + """ + return eq(x=self, y=y, delta=delta, case_sensitive=case_sensitive) + + def exp(self): + """ + Exponentiation to the base e + + :param self: The numerical exponent. + + :return: The computed value for *e* raised to the power of `p`. + """ + return exp(p=self) + + def extrema(self, ignore_nodata=UNSET): + """ + Minimum and maximum values + + :param self: An array of numbers. 
+ :param ignore_nodata: Indicates whether no-data values are ignored or not. Ignores them by default. + Setting this flag to `false` considers no-data values so that an array with two `null` values is + returned if any value is such a value. + + :return: An array containing the minimum and maximum values for the specified numbers. The first + element is the minimum, the second element is the maximum. If the input array is empty both elements + are set to `null`. + """ + return extrema(data=self, ignore_nodata=ignore_nodata) + + def filter_bands(self, bands=UNSET, wavelengths=UNSET): + """ + Filter the bands by name + + :param self: A data cube with bands. + :param bands: A list of band names. Either the unique band name (metadata field `name` in bands) or one + of the common band names (metadata field `common_name` in bands). If unique band name and common name + conflict, the unique band name has higher priority. The order of the specified array defines the order + of the bands in the data cube. If multiple bands match a common name, all matched bands are included in + the original order. + :param wavelengths: A list of sub-lists with each sub-list consisting of two elements. The first + element is the minimum wavelength and the second element is the maximum wavelength. Wavelengths are + specified in micrometres (μm). The order of the specified array defines the order of the bands in the + data cube. If multiple bands match the wavelengths, all matched bands are included in the original + order. + + :return: A data cube limited to a subset of its original bands. The dimensions and dimension properties + (name, type, labels, reference system and resolution) remain unchanged, except that the dimension of + type `bands` has less (or the same) dimension labels. + """ + return filter_bands(data=self, bands=bands, wavelengths=wavelengths) + + def filter_bbox(self, extent): + """ + Spatial filter using a bounding box + + :param self: A data cube. 
+ :param extent: A bounding box, which may include a vertical axis (see `base` and `height`). + + :return: A data cube restricted to the bounding box. The dimensions and dimension properties (name, + type, labels, reference system and resolution) remain unchanged, except that the spatial dimensions + have less (or the same) dimension labels. + """ + return filter_bbox(data=self, extent=extent) + + def filter_labels(self, condition, dimension, context=UNSET): + """ + Filter dimension labels based on a condition + + :param self: A data cube. + :param condition: A condition that is evaluated against each dimension label in the specified + dimension. A dimension label and the corresponding data is preserved for the given dimension, if the + condition returns `true`. + :param dimension: The name of the dimension to filter on. Fails with a `DimensionNotAvailable` error if + the specified dimension does not exist. + :param context: Additional data to be passed to the condition. + + :return: A data cube with the same dimensions. The dimension properties (name, type, labels, reference + system and resolution) remain unchanged, except that the given dimension has less (or the same) + dimension labels. + """ + return filter_labels(data=self, condition=condition, dimension=dimension, context=context) + + def filter_spatial(self, geometries): + """ + Spatial filter using geometries + + :param self: A data cube. + :param geometries: One or more geometries used for filtering, specified as GeoJSON. + + :return: A data cube restricted to the specified geometries. The dimensions and dimension properties + (name, type, labels, reference system and resolution) remain unchanged, except that the spatial + dimensions have less (or the same) dimension labels. + """ + return filter_spatial(data=self, geometries=geometries) + + def filter_temporal(self, extent, dimension=UNSET): + """ + Temporal filter for a temporal intervals + + :param self: A data cube. 
+ :param extent: Left-closed temporal interval, i.e. an array with exactly two elements: 1. The first + element is the start of the temporal interval. The specified instance in time is **included** in the + interval. 2. The second element is the end of the temporal interval. The specified instance in time is + **excluded** from the interval. The specified temporal strings follow [RFC + 3339](https://tools.ietf.org/html/rfc3339). Also supports open intervals by setting one of the + boundaries to `null`, but never both. + :param dimension: The name of the temporal dimension to filter on. If the dimension is not set or is + set to `null`, the filter applies to all temporal dimensions. Fails with a `DimensionNotAvailable` + error if the specified dimension does not exist. + + :return: A data cube restricted to the specified temporal extent. The dimensions and dimension + properties (name, type, labels, reference system and resolution) remain unchanged, except that the + given temporal dimension(s) have less (or the same) dimension labels. + """ + return filter_temporal(data=self, extent=extent, dimension=dimension) + + def first(self, ignore_nodata=UNSET): + """ + First element + + :param self: An array with elements of any data type. + :param ignore_nodata: Indicates whether no-data values are ignored or not. Ignores them by default. + Setting this flag to `false` considers no-data values so that `null` is returned if the first value is + such a value. + + :return: The first element of the input array. + """ + return first(data=self, ignore_nodata=ignore_nodata) + + def floor(self): + """ + Round fractions down + + :param self: A number to round down. + + :return: The number rounded down. + """ + return floor(x=self) + + def gt(self, y): + """ + Greater than comparison + + :param self: First operand. + :param y: Second operand. + + :return: `true` if `x` is strictly greater than `y` or `null` if any operand is `null`, otherwise + `false`. 
+ """ + return gt(x=self, y=y) + + def gte(self, y): + """ + Greater than or equal to comparison + + :param self: First operand. + :param y: Second operand. + + :return: `true` if `x` is greater than or equal to `y`, `null` if any operand is `null`, otherwise + `false`. + """ + return gte(x=self, y=y) + + def if_(self, accept, reject=UNSET): + """ + If-Then-Else conditional + + :param self: A boolean value. + :param accept: A value that is returned if the boolean value is `true`. + :param reject: A value that is returned if the boolean value is **not** `true`. Defaults to `null`. + + :return: Either the `accept` or `reject` argument depending on the given boolean value. + """ + return if_(value=self, accept=accept, reject=reject) + + def int(self): + """ + Integer part of a number + + :param self: A number. + + :return: Integer part of the number. + """ + return int(x=self) + + def is_nan(self): + """ + Value is not a number + + :param self: The data to check. + + :return: `true` if the data is not a number, otherwise `false` + """ + return is_nan(x=self) + + def is_nodata(self): + """ + Value is not a no-data value + + :param self: The data to check. + + :return: `true` if the data is a no-data value, otherwise `false` + """ + return is_nodata(x=self) + + def is_valid(self): + """ + Value is valid data + + :param self: The data to check. + + :return: `true` if the data is valid, otherwise `false`. + """ + return is_valid(x=self) + + def last(self, ignore_nodata=UNSET): + """ + Last element + + :param self: An array with elements of any data type. + :param ignore_nodata: Indicates whether no-data values are ignored or not. Ignores them by default. + Setting this flag to `false` considers no-data values so that `null` is returned if the last value is + such a value. + + :return: The last element of the input array. 
+ """ + return last(data=self, ignore_nodata=ignore_nodata) + + def linear_scale_range(self, inputMin, inputMax, outputMin=UNSET, outputMax=UNSET): + """ + Linear transformation between two ranges + + :param self: A number to transform. The number gets clipped to the bounds specified in `inputMin` and + `inputMax`. + :param inputMin: Minimum value the input can obtain. + :param inputMax: Maximum value the input can obtain. + :param outputMin: Minimum value of the desired output range. + :param outputMax: Maximum value of the desired output range. + + :return: The transformed number. + """ + return linear_scale_range(x=self, inputMin=inputMin, inputMax=inputMax, outputMin=outputMin, outputMax=outputMax) + + def ln(self): + """ + Natural logarithm + + :param self: A number to compute the natural logarithm for. + + :return: The computed natural logarithm. + """ + return ln(x=self) + + def load_collection(self, spatial_extent, temporal_extent, bands=UNSET, properties=UNSET): + """ + Load a collection + + :param self: The collection id. + :param spatial_extent: Limits the data to load from the collection to the specified bounding box or + polygons. The process puts a pixel into the data cube if the point at the pixel center intersects with + the bounding box or any of the polygons (as defined in the Simple Features standard by the OGC). The + GeoJSON can be one of the following GeoJSON types: * A `Polygon` geometry, * a `GeometryCollection` + containing Polygons, * a `Feature` with a `Polygon` geometry or * a `FeatureCollection` containing + `Feature`s with a `Polygon` geometry. Set this parameter to `null` to set no limit for the spatial + extent. Be careful with this when loading large datasets! + :param temporal_extent: Limits the data to load from the collection to the specified left-closed + temporal interval. Applies to all temporal dimensions. The interval has to be specified as an array + with exactly two elements: 1. 
The first element is the start of the temporal interval. The specified + instance in time is **included** in the interval. 2. The second element is the end of the temporal + interval. The specified instance in time is **excluded** from the interval. The specified temporal + strings follow [RFC 3339](https://tools.ietf.org/html/rfc3339). Also supports open intervals by setting + one of the boundaries to `null`, but never both. Set this parameter to `null` to set no limit for the + spatial extent. Be careful with this when loading large datasets! + :param bands: Only adds the specified bands into the data cube so that bands that don't match the list + of band names are not available. Applies to all dimensions of type `bands`. Either the unique band + name (metadata field `name` in bands) or one of the common band names (metadata field `common_name` in + bands) can be specified. If unique band name and common name conflict, the unique band name has higher + priority. The order of the specified array defines the order of the bands in the data cube. If multiple + bands match a common name, all matched bands are included in the original order. + :param properties: Limits the data by metadata properties to include only data in the data cube which + all given conditions return `true` for (AND operation). Specify key-value-pairs with the key being the + name of the metadata property, which can be retrieved with the openEO Data Discovery for Collections. + The value must be a condition (user-defined process) to be evaluated against the collection metadata, see + the example. + + :return: A data cube for further processing. The dimensions and dimension properties (name, type, + labels, reference system and resolution) correspond to the collection's metadata, but the dimension + labels are restricted as specified in the parameters. 
+ """ + return load_collection(id=self, spatial_extent=spatial_extent, temporal_extent=temporal_extent, bands=bands, properties=properties) + + def load_result(self): + """ + Load batch job results + + :param self: The id of a batch job with results. + + :return: A data cube for further processing. + """ + return load_result(id=self) + + def load_uploaded_files(self, format, options=UNSET): + """ + Load files from the user workspace + + :param self: The files to read. Folders can't be specified, instead specify all files. An error is + thrown if a file can't be read. + :param format: The file format to read from. It must be one of the values that the server reports as + supported input file formats, which usually correspond to the short GDAL/OGR codes. If the format is + not suitable for loading the data, a `FormatUnsuitable` exception will be thrown. This parameter is + *case insensitive*. + :param options: The file format parameters to be used to read the files. Must correspond to the + parameters that the server reports as supported parameters for the chosen `format`. The parameter names + and valid values usually correspond to the GDAL/OGR format options. + + :return: A data cube for further processing. + """ + return load_uploaded_files(paths=self, format=format, options=options) + + def log(self, base): + """ + Logarithm to a base + + :param self: A number to compute the logarithm for. + :param base: The numerical base. + + :return: The computed logarithm. + """ + return log(x=self, base=base) + + def lt(self, y): + """ + Less than comparison + + :param self: First operand. + :param y: Second operand. + + :return: `true` if `x` is strictly less than `y`, `null` if any operand is `null`, otherwise `false`. + """ + return lt(x=self, y=y) + + def lte(self, y): + """ + Less than or equal to comparison + + :param self: First operand. + :param y: Second operand. 
+ + :return: `true` if `x` is less than or equal to `y`, `null` if any operand is `null`, otherwise + `false`. + """ + return lte(x=self, y=y) + + def mask(self, mask, replacement=UNSET): + """ + Apply a raster mask + + :param self: A raster data cube. + :param mask: A mask as raster data cube. Every pixel in `data` must have a corresponding element in + `mask`. + :param replacement: The value used to replace masked values with. + + :return: A masked raster data cube with the same dimensions. The dimension properties (name, type, + labels, reference system and resolution) remain unchanged. + """ + return mask(data=self, mask=mask, replacement=replacement) + + def mask_polygon(self, mask, replacement=UNSET, inside=UNSET): + """ + Apply a polygon mask + + :param self: A raster data cube. + :param mask: A GeoJSON object containing a polygon. The provided feature types can be one of the + following: * A `Polygon` geometry, * a `GeometryCollection` containing Polygons, * a `Feature` with a + `Polygon` geometry or * a `FeatureCollection` containing `Feature`s with a `Polygon` geometry. + :param replacement: The value used to replace masked values with. + :param inside: If set to `true` all pixels for which the point at the pixel center **does** intersect + with any polygon are replaced. + + :return: A masked raster data cube with the same dimensions. The dimension properties (name, type, + labels, reference system and resolution) remain unchanged. + """ + return mask_polygon(data=self, mask=mask, replacement=replacement, inside=inside) + + def max(self, ignore_nodata=UNSET): + """ + Maximum value + + :param self: An array of numbers. + :param ignore_nodata: Indicates whether no-data values are ignored or not. Ignores them by default. + Setting this flag to `false` considers no-data values so that `null` is returned if any value is such a + value. + + :return: The maximum value. 
+ """ + return max(data=self, ignore_nodata=ignore_nodata) + + def mean(self, ignore_nodata=UNSET): + """ + Arithmetic mean (average) + + :param self: An array of numbers. + :param ignore_nodata: Indicates whether no-data values are ignored or not. Ignores them by default. + Setting this flag to `false` considers no-data values so that `null` is returned if any value is such a + value. + + :return: The computed arithmetic mean. + """ + return mean(data=self, ignore_nodata=ignore_nodata) + + def median(self, ignore_nodata=UNSET): + """ + Statistical median + + :param self: An array of numbers. + :param ignore_nodata: Indicates whether no-data values are ignored or not. Ignores them by default. + Setting this flag to `false` considers no-data values so that `null` is returned if any value is such a + value. + + :return: The computed statistical median. + """ + return median(data=self, ignore_nodata=ignore_nodata) + + def merge_cubes(self, cube2, overlap_resolver=UNSET, context=UNSET): + """ + Merging two data cubes + + :param self: The first data cube. + :param cube2: The second data cube. + :param overlap_resolver: A reduction operator that resolves the conflict if the data overlaps. The + reducer must return a value of the same data type as the input values are. The reduction operator may + be a single process such as ``multiply()`` or consist of multiple sub-processes. `null` (the default) + can be specified if no overlap resolver is required. + :param context: Additional data to be passed to the overlap resolver. + + :return: The merged data cube. See the process description for details regarding the dimensions and + dimension properties (name, type, labels, reference system and resolution). + """ + return merge_cubes(cube1=self, cube2=cube2, overlap_resolver=overlap_resolver, context=context) + + def min(self, ignore_nodata=UNSET): + """ + Minimum value + + :param self: An array of numbers. 
+ :param ignore_nodata: Indicates whether no-data values are ignored or not. Ignores them by default. + Setting this flag to `false` considers no-data values so that `null` is returned if any value is such a + value. + + :return: The minimum value. + """ + return min(data=self, ignore_nodata=ignore_nodata) + + def mod(self, y): + """ + Modulo + + :param self: A number to be used as dividend. + :param y: A number to be used as divisor. + + :return: The remainder after division. + """ + return mod(x=self, y=y) + + def multiply(self, y): + """ + Multiplication of two numbers + + :param self: The multiplier. + :param y: The multiplicand. + + :return: The computed product of the two numbers. + """ + return multiply(x=self, y=y) + + def ndvi(self, nir=UNSET, red=UNSET, target_band=UNSET): + """ + Normalized Difference Vegetation Index + + :param self: A raster data cube with two bands that have the common names `red` and `nir` assigned. + :param nir: The name of the NIR band. Defaults to the band that has the common name `nir` assigned. + Either the unique band name (metadata field `name` in bands) or one of the common band names (metadata + field `common_name` in bands) can be specified. If unique band name and common name conflict, the + unique band name has higher priority. + :param red: The name of the red band. Defaults to the band that has the common name `red` assigned. + Either the unique band name (metadata field `name` in bands) or one of the common band names (metadata + field `common_name` in bands) can be specified. If unique band name and common name conflict, the + unique band name has higher priority. + :param target_band: By default, the dimension of type `bands` is dropped. To keep the dimension specify + a new band name in this parameter so that a new dimension label with the specified name will be added + for the computed values. + + :return: A raster data cube containing the computed NDVI values. 
The structure of the data cube differs + depending on the value passed to `target_band`: * `target_band` is `null`: The data cube does not + contain the dimension of type `bands` any more, the number of dimensions decreases by one. The + dimension properties (name, type, labels, reference system and resolution) for all other dimensions + remain unchanged. * `target_band` is a string: The data cube keeps the same dimensions. The dimension + properties remain unchanged, but the number of dimension labels for the dimension of type `bands` + increases by one. The additional label is named as specified in `target_band`. + """ + return ndvi(data=self, nir=nir, red=red, target_band=target_band) + + def neq(self, y, delta=UNSET, case_sensitive=UNSET): + """ + Not equal to comparison + + :param self: First operand. + :param y: Second operand. + :param delta: Only applicable for comparing two numbers. If this optional parameter is set to a + positive non-zero number the non-equality of two numbers is checked against a delta value. This is + especially useful to circumvent problems with floating point inaccuracy in machine-based computation. + This option is basically an alias for the following computation: `gt(abs(minus([x, y]), delta)` + :param case_sensitive: Only applicable for comparing two strings. Case sensitive comparison can be + disabled by setting this parameter to `false`. + + :return: Returns `true` if `x` is *not* equal to `y`, `null` if any operand is `null`, otherwise + `false`. + """ + return neq(x=self, y=y, delta=delta, case_sensitive=case_sensitive) + + def normalized_difference(self, y): + """ + Normalized difference + + :param self: The value for the first band. + :param y: The value for the second band. + + :return: The computed normalized difference. + """ + return normalized_difference(x=self, y=y) + + def not_(self): + """ + Inverting a boolean + + :param self: Boolean value to invert. + + :return: Inverted boolean value. 
+ """ + return not_(x=self) + + def or_(self, y): + """ + Logical OR + + :param self: A boolean value. + :param y: A boolean value. + + :return: Boolean result of the logical OR. + """ + return or_(x=self, y=y) + + def order(self, asc=UNSET, nodata=UNSET): + """ + Create a permutation + + :param self: An array to compute the order for. + :param asc: The default sort order is ascending, with smallest values first. To sort in reverse + (descending) order, set this parameter to `false`. + :param nodata: Controls the handling of no-data values (`null`). By default they are removed. If + `true`, missing values in the data are put last; if `false`, they are put first. + + :return: The computed permutation. + """ + return order(data=self, asc=asc, nodata=nodata) + + def pi(self): + """ + Pi (π) + + :return: The numerical value of Pi. + """ + return pi() + + def power(self, p): + """ + Exponentiation + + :param self: The numerical base. + :param p: The numerical exponent. + + :return: The computed value for `base` raised to the power of `p`. + """ + return power(base=self, p=p) + + def product(self, ignore_nodata=UNSET): + """ + Compute the product by multiplying numbers + + :param self: An array of numbers. + :param ignore_nodata: Indicates whether no-data values are ignored or not. Ignores them by default. + Setting this flag to `false` considers no-data values so that `null` is returned if any value is such a + value. + + :return: The computed product of the sequence of numbers. + """ + return product(data=self, ignore_nodata=ignore_nodata) + + def quantiles(self, probabilities=UNSET, q=UNSET, ignore_nodata=UNSET): + """ + Quantiles + + :param self: An array of numbers. + :param probabilities: A list of probabilities to calculate quantiles for. The probabilities must be + between 0 and 1. + :param q: A number of intervals to calculate quantiles for. Calculates q-quantiles with (nearly) equal- + sized intervals. 
+ :param ignore_nodata: Indicates whether no-data values are ignored or not. Ignores them by default. + Setting this flag to `false` considers no-data values so that an array with `null` values is returned + if any element is such a value. + + :return: An array with the computed quantiles. The list has either * as many elements as the given + list of `probabilities` had or * *`q`-1* elements. If the input array is empty the resulting array is + filled with as many `null` values as required according to the list above. For an example, see the + 'Empty array example'. + """ + return quantiles(data=self, probabilities=probabilities, q=q, ignore_nodata=ignore_nodata) + + def rearrange(self, order): + """ + Rearrange an array based on a permutation + + :param self: The array to rearrange. + :param order: The permutation used for rearranging. + + :return: The rearranged array. + """ + return rearrange(data=self, order=order) + + def reduce_dimension(self, reducer, dimension, context=UNSET): + """ + Reduce dimensions + + :param self: A data cube. + :param reducer: A reducer to apply on the specified dimension. A reducer is a single process such as + ``mean()`` or a set of processes, which computes a single value for a list of values, see the category + 'reducer' for such processes. + :param dimension: The name of the dimension over which to reduce. Fails with a `DimensionNotAvailable` + error if the specified dimension does not exist. + :param context: Additional data to be passed to the reducer. + + :return: A data cube with the newly computed values. It is missing the given dimension, the number of + dimensions decreases by one. The dimension properties (name, type, labels, reference system and + resolution) for all other dimensions remain unchanged. 
+ """ + return reduce_dimension(data=self, reducer=reducer, dimension=dimension, context=context) + + def reduce_dimension_binary(self, reducer, dimension, context=UNSET): + """ + Reduce dimensions using binary reduction + + :param self: A data cube. + :param reducer: A reduction operator to be applied consecutively on pairs of values. It must be both + associative and commutative as the execution may be executed in parallel and therefore the order of + execution is arbitrary. The reduction operator may be a single process such as ``multiply()`` or + consist of multiple sub-processes. + :param dimension: The name of the dimension over which to reduce. Fails with a `DimensionNotAvailable` + error if the specified dimension does not exist. + :param context: Additional data to be passed to the reducer. + + :return: A data cube with the newly computed values. It is missing the given dimension, the number of + dimensions decreases by one. The dimension properties (name, type, labels, reference system and + resolution) for all other dimensions remain unchanged. + """ + return reduce_dimension_binary(data=self, reducer=reducer, dimension=dimension, context=context) + + def rename_dimension(self, source, target): + """ + Rename a dimension + + :param self: The data cube. + :param source: The current name of the dimension. Fails with a `DimensionNotAvailable` error if the + specified dimension does not exist. + :param target: A new Name for the dimension. Fails with a `DimensionExists` error if a dimension with + the specified name exists. + + :return: A data cube with the same dimensions, but the name of one of the dimensions changes. The old + name can not be referred to any longer. The dimension properties (name, type, labels, reference system + and resolution) remain unchanged. 
+ """ + return rename_dimension(data=self, source=source, target=target) + + def rename_labels(self, dimension, target, source=UNSET): + """ + Rename dimension labels + + :param self: The data cube. + :param dimension: The name of the dimension to rename the labels for. + :param target: The new names for the labels. The dimension labels in the data cube are expected to be + enumerated, if the parameter `target` is not specified. If a target dimension label already exists in + the data cube, a `LabelExists` error is thrown. + :param source: The names of the labels as they are currently in the data cube. The array defines an + unsorted and potentially incomplete list of labels that should be renamed to the names available in the + corresponding array elements in the parameter `target`. If one of the source dimension labels doesn't + exist, a `LabelNotAvailable` error is thrown. By default, the array is empty so that the dimension + labels in the data cube are expected to be enumerated. + + :return: The data cube with the same dimensions. The dimension properties (name, type, labels, + reference system and resolution) remain unchanged, except that for the given dimension the labels + change. The old labels can not be referred to any longer. The number of labels remains the same. + """ + return rename_labels(data=self, dimension=dimension, target=target, source=source) + + def resample_cube_spatial(self, target, method=UNSET): + """ + Resample the spatial dimensions to match a target data cube + + :param self: A data cube. + :param target: A data cube that describes the spatial target resolution. + :param method: Resampling method. Methods are inspired by GDAL, see + [gdalwarp](https://www.gdal.org/gdalwarp.html) for more information. + + :return: A data cube with the same dimensions. The dimension properties (name, type, labels, reference + system and resolution) remain unchanged, except for the resolution and dimension labels of the spatial + dimensions. 
+ """ + return resample_cube_spatial(data=self, target=target, method=method) + + def resample_cube_temporal(self, target, method, dimension=UNSET, context=UNSET): + """ + Resample a temporal dimension to match a target data cube + + :param self: A data cube. + :param target: A data cube that describes the temporal target resolution. + :param method: A resampling method to be applied, could be a reducer for downsampling or other methods + for upsampling. A reducer is a single process such as ``mean()`` or a set of processes, which computes + a single value for a list of values, see the category 'reducer' for such processes. + :param dimension: The name of the temporal dimension to resample, which must exist with this name in + both data cubes. If the dimension is not set or is set to `null`, the data cube is expected to only + have one temporal dimension. Fails with a `TooManyDimensions` error if it has more dimensions. Fails + with a `DimensionNotAvailable` error if the specified dimension does not exist. + :param context: Additional data to be passed to the process specified for the parameter `method`. + + :return: A raster data cube with the same dimensions and the same dimension properties (name, type, + labels, reference system and resolution) for all non-temporal dimensions. For the temporal dimension + the name and type remain unchanged, but the reference system changes and the labels and resolution may + change. + """ + return resample_cube_temporal(data=self, target=target, method=method, dimension=dimension, context=context) + + def resample_spatial(self, resolution=UNSET, projection=UNSET, method=UNSET, align=UNSET): + """ + Resample and warp the spatial dimensions + + :param self: A raster data cube. + :param resolution: Resamples the data cube to the target resolution, which can be specified either as + separate values for x and y or as a single value for both axes. Specified in the units of the target + projection. 
Doesn't change the resolution by default (`0`).
+        :param projection: Warps the data cube to the target projection, specified as [EPSG
+            code](http://www.epsg-registry.org/), [WKT2 (ISO 19162)
+            string](http://docs.opengeospatial.org/is/18-010r7/18-010r7.html), [PROJ definition
+            (deprecated)](https://proj.org/usage/quickstart.html). By default (`null`), the projection is not
+            changed.
+        :param method: Resampling method. Methods are inspired by GDAL, see
+            [gdalwarp](https://www.gdal.org/gdalwarp.html) for more information.
+        :param align: Specifies to which corner of the spatial extent the new resampled data is aligned to.
+
+        :return: A raster data cube with values warped onto the new projection. It has the same dimensions and
+            the same dimension properties (name, type, labels, reference system and resolution) for all non-spatial
+            or vertical spatial dimensions. For the horizontal spatial dimensions the name and type remain
+            unchanged, but reference system, labels and resolution may change depending on the given parameters.
+        """
+        return resample_spatial(data=self, resolution=resolution, projection=projection, method=method, align=align)
+
+    def round(self, p=UNSET):
+        """
+        Round to a specified precision
+
+        :param self: A number to round.
+        :param p: A positive number specifies the number of digits after the decimal point to round to. A
+            negative number means rounding to a power of ten, so for example *-2* rounds to the nearest hundred.
+            Defaults to *0*.
+
+        :return: The rounded number.
+        """
+        return round(x=self, p=p)
+
+    def run_udf(self, udf, runtime, version=UNSET, context=UNSET):
+        """
+        Run an UDF
+
+        :param self: The data to be passed to the UDF as array or raster data cube.
+        :param udf: Either source code, an absolute URL or a path to an UDF script.
+        :param runtime: An UDF runtime identifier available at the back-end.
+        :param version: An UDF runtime version. If set to `null`, the default runtime version specified for
+            each runtime is used.
+ :param context: Additional data such as configuration options that should be passed to the UDF. + + :return: The data processed by the UDF. * Returns a raster data cube, if a raster data cube is passed + for `data`. Details on the dimensions and dimension properties (name, type, labels, reference system + and resolution) depend on the UDF. * If an array is passed for `data`, the returned value can be of any + data type, but is exactly what the UDF returns. + """ + return run_udf(data=self, udf=udf, runtime=runtime, version=version, context=context) + + def run_udf_externally(self, url, context=UNSET): + """ + Run an externally hosted UDF container + + :param self: The data to be passed to the UDF as array or raster data cube. + :param url: URL to a remote UDF service. + :param context: Additional data such as configuration options that should be passed to the UDF. + + :return: The data processed by the UDF service. * Returns a raster data cube, if a raster data cube is + passed for `data`. Details on the dimensions and dimension properties (name, type, labels, reference + system and resolution) depend on the UDF. * If an array is passed for `data`, the returned value can be + of any data type, but is exactly what the UDF returns. + """ + return run_udf_externally(data=self, url=url, context=context) + + def save_result(self, format, options=UNSET): + """ + Save processed data to storage + + :param self: The data to save. + :param format: The file format to save to. It must be one of the values that the server reports as + supported output file formats, which usually correspond to the short GDAL/OGR codes. If the format is + not suitable for storing the underlying data structure, a `FormatUnsuitable` exception will be thrown. + This parameter is *case insensitive*. + :param options: The file format parameters to be used to create the file(s). Must correspond to the + parameters that the server reports as supported parameters for the chosen `format`. 
The parameter names + and valid values usually correspond to the GDAL/OGR format options. + + :return: `false` if saving failed, `true` otherwise. + """ + return save_result(data=self, format=format, options=options) + + def sd(self, ignore_nodata=UNSET): + """ + Standard deviation + + :param self: An array of numbers. + :param ignore_nodata: Indicates whether no-data values are ignored or not. Ignores them by default. + Setting this flag to `false` considers no-data values so that `null` is returned if any value is such a + value. + + :return: The computed sample standard deviation. + """ + return sd(data=self, ignore_nodata=ignore_nodata) + + def sgn(self): + """ + Signum + + :param self: A number. + + :return: The computed signum value of `x`. + """ + return sgn(x=self) + + def sin(self): + """ + Sine + + :param self: An angle in radians. + + :return: The computed sine of `x`. + """ + return sin(x=self) + + def sinh(self): + """ + Hyperbolic sine + + :param self: An angle in radians. + + :return: The computed hyperbolic sine of `x`. + """ + return sinh(x=self) + + def sort(self, asc=UNSET, nodata=UNSET): + """ + Sort data + + :param self: An array with data to sort. + :param asc: The default sort order is ascending, with smallest values first. To sort in reverse + (descending) order, set this parameter to `false`. + :param nodata: Controls the handling of no-data values (`null`). By default they are removed. If + `true`, missing values in the data are put last; if `false`, they are put first. + + :return: The sorted array. + """ + return sort(data=self, asc=asc, nodata=nodata) + + def sqrt(self): + """ + Square root + + :param self: A number. + + :return: The computed square root. + """ + return sqrt(x=self) + + def subtract(self, y): + """ + Subtraction of two numbers + + :param self: The minuend. + :param y: The subtrahend. + + :return: The computed result. 
+ """ + return subtract(x=self, y=y) + + def sum(self, ignore_nodata=UNSET): + """ + Compute the sum by adding up numbers + + :param self: An array of numbers. + :param ignore_nodata: Indicates whether no-data values are ignored or not. Ignores them by default. + Setting this flag to `false` considers no-data values so that `null` is returned if any value is such a + value. + + :return: The computed sum of the sequence of numbers. + """ + return sum(data=self, ignore_nodata=ignore_nodata) + + def tan(self): + """ + Tangent + + :param self: An angle in radians. + + :return: The computed tangent of `x`. + """ + return tan(x=self) + + def tanh(self): + """ + Hyperbolic tangent + + :param self: An angle in radians. + + :return: The computed hyperbolic tangent of `x`. + """ + return tanh(x=self) + + def text_begins(self, pattern, case_sensitive=UNSET): + """ + Text begins with another text + + :param self: Text in which to find something at the beginning. + :param pattern: Text to find at the beginning of `data`. Regular expressions are not supported. + :param case_sensitive: Case sensitive comparison can be disabled by setting this parameter to `false`. + + :return: `true` if `data` begins with `pattern`, false` otherwise. + """ + return text_begins(data=self, pattern=pattern, case_sensitive=case_sensitive) + + def text_contains(self, pattern, case_sensitive=UNSET): + """ + Text contains another text + + :param self: Text in which to find something in. + :param pattern: Text to find in `data`. Regular expressions are not supported. + :param case_sensitive: Case sensitive comparison can be disabled by setting this parameter to `false`. + + :return: `true` if `data` contains the `pattern`, false` otherwise. + """ + return text_contains(data=self, pattern=pattern, case_sensitive=case_sensitive) + + def text_ends(self, pattern, case_sensitive=UNSET): + """ + Text ends with another text + + :param self: Text in which to find something at the end. 
+        :param pattern: Text to find at the end of `data`. Regular expressions are not supported.
+        :param case_sensitive: Case sensitive comparison can be disabled by setting this parameter to `false`.
+
+        :return: `true` if `data` ends with `pattern`, `false` otherwise.
+        """
+        return text_ends(data=self, pattern=pattern, case_sensitive=case_sensitive)
+
+    def text_merge(self, separator=UNSET):
+        """
+        Concatenate elements to a string
+
+        :param self: A set of elements. Numbers, boolean values and null values get converted to their (lower
+            case) string representation. For example: `1` (integer), `-1.5` (number), `true` / `false` (boolean
+            values)
+        :param separator: A separator to put between each of the individual texts. Defaults to an empty string.
+
+        :return: Returns a string containing a string representation of all the array elements in the same
+            order, with the separator between each element.
+        """
+        return text_merge(data=self, separator=separator)
+
+    def trim_cube(self):
+        """
+        Remove dimension labels with no-data values
+
+        :param self: A raster data cube to trim.
+
+        :return: A trimmed raster data cube with the same dimensions. The dimension properties name, type,
+            reference system and resolution remain unchanged. The number of dimension labels may decrease.
+        """
+        return trim_cube(data=self)
+
+    def variance(self, ignore_nodata=UNSET):
+        """
+        Variance
+
+        :param self: An array of numbers.
+        :param ignore_nodata: Indicates whether no-data values are ignored or not. Ignores them by default.
+            Setting this flag to `false` considers no-data values so that `null` is returned if any value is such a
+            value.
+
+        :return: The computed sample variance.
+        """
+        return variance(data=self, ignore_nodata=ignore_nodata)
+
+    def xor(self, y):
+        """
+        Logical XOR (exclusive or)
+
+        :param self: A boolean value.
+        :param y: A boolean value.
+
+        :return: Boolean result of the logical XOR.
+ """ + return xor(x=self, y=y) + + +# Shortcut +process = ProcessBuilder.process + + +def absolute(x): + """ + Absolute value + + :param x: A number. + + :return: The computed absolute value. + """ + return process('absolute', x=x) + + +def add(x, y): + """ + Addition of two numbers + + :param x: The first summand. + :param y: The second summand. + + :return: The computed sum of the two numbers. + """ + return process('add', x=x, y=y) + + +def add_dimension(data, name, label, type=UNSET): + """ + Add a new dimension + + :param data: A data cube to add the dimension to. + :param name: Name for the dimension. + :param label: A dimension label. + :param type: The type of dimension, defaults to `other`. + + :return: The data cube with a newly added dimension. The new dimension has exactly one dimension label. All + other dimensions remain unchanged. + """ + return process('add_dimension', data=data, name=name, label=label, type=type) + + +def aggregate_spatial(data, geometries, reducer, target_dimension=UNSET, context=UNSET): + """ + Zonal statistics for geometries + + :param data: A raster data cube. The data cube implicitly gets restricted to the bounds of the geometries + as if ``filter_spatial()`` would have been used with the same values for the corresponding parameters + immediately before this process. + :param geometries: Geometries as GeoJSON on which the aggregation will be based. + :param reducer: A reducer to be applied on all values of each geometry. A reducer is a single process such + as ``mean()`` or a set of processes, which computes a single value for a list of values, see the category + 'reducer' for such processes. + :param target_dimension: The new dimension name to be used for storing the results. Defaults to `result`. + :param context: Additional data to be passed to the reducer. + + :return: A vector data cube with the computed results and restricted to the bounds of the geometries. 
The + computed value is stored in dimension with the name that was specified in the parameter `target_dimension`. + The computation also stores information about the total count of pixels (valid + invalid pixels) and the + number of valid pixels (see ``is_valid()``) for each geometry. These values are stored as new dimension + with a dimension name derived from `target_dimension` by adding the suffix `_meta`. The new dimension has + the dimension labels `total_count` and `valid_count`. + """ + return process('aggregate_spatial', data=data, geometries=geometries, reducer=reducer, target_dimension=target_dimension, context=context) + + +def aggregate_spatial_binary(data, geometries, reducer, target_dimension=UNSET, context=UNSET): + """ + Zonal statistics for geometries by binary aggregation + + :param data: A raster data cube. The data cube implicitly gets restricted to the bounds of the geometries + as if ``filter_spatial()`` would have been used with the same values for the corresponding parameters + immediately before this process. + :param geometries: Geometries as GeoJSON on which the aggregation will be based. + :param reducer: A reduction operator to be applied consecutively on tuples of values. It must be both + associative and commutative as the execution may be executed in parallel and therefore the order of + execution is arbitrary. The reduction operator may be a single process such as ``multiply()`` or consist of + multiple sub-processes. + :param target_dimension: The new dimension name to be used for storing the results. Defaults to `result`. + :param context: Additional data to be passed to the reducer. + + :return: A vector data cube with the computed results and restricted to the bounds of the geometries. The + computed value is stored in dimension with the name that was specified in the parameter `target_dimension`. 
+ The computation also stores information about the total count of pixels (valid + invalid pixels) and the + number of valid pixels (see ``is_valid()``) for each geometry. These values are stored as new dimension + with a dimension name derived from `target_dimension` by adding the suffix `_meta`. The new dimension has + the dimension labels `total_count` and `valid_count`. + """ + return process('aggregate_spatial_binary', data=data, geometries=geometries, reducer=reducer, target_dimension=target_dimension, context=context) + + +def aggregate_temporal(data, intervals, reducer, labels=UNSET, dimension=UNSET, context=UNSET): + """ + Temporal aggregations + + :param data: A data cube. + :param intervals: Left-closed temporal intervals, which are allowed to overlap. Each temporal interval in + the array has exactly two elements: 1. The first element is the start of the temporal interval. The + specified instance in time is **included** in the interval. 2. The second element is the end of the + temporal interval. The specified instance in time is **excluded** from the interval. The specified + temporal strings follow [RFC 3339](https://tools.ietf.org/html/rfc3339). Although [RFC 3339 prohibits the + hour to be '24'](https://tools.ietf.org/html/rfc3339#section-5.7), **this process allows the value '24' for + the hour** of an end time in order to make it possible that left-closed time intervals can fully cover the + day. + :param reducer: A reducer to be applied on all values along the specified dimension. A reducer is a single + process such as ``mean()`` or a set of processes, which computes a single value for a list of values, see + the category 'reducer' for such processes. + :param labels: Distinct labels for the intervals, which can contain dates and/or times. Is only required to + be specified if the values for the start of the temporal intervals are not distinct and thus the default + labels would not be unique. 
The number of labels and the number of groups need to be equal. + :param dimension: The name of the temporal dimension for aggregation. All data along the dimension is + passed through the specified reducer. If the dimension is not set or set to `null`, the data cube is + expected to only have one temporal dimension. Fails with a `TooManyDimensions` error if it has more + dimensions. Fails with a `DimensionNotAvailable` error if the specified dimension does not exist. + :param context: Additional data to be passed to the reducer. + + :return: A data cube with the same dimensions. The dimension properties (name, type, labels, reference + system and resolution) remain unchanged, except for the resolution and dimension labels of the given + temporal dimension. + """ + return process('aggregate_temporal', data=data, intervals=intervals, reducer=reducer, labels=labels, dimension=dimension, context=context) + + +def aggregate_temporal_period(data, period, reducer, dimension=UNSET, context=UNSET): + """ + Temporal aggregations based on calendar hierarchies + + :param data: A data cube. + :param period: The time intervals to aggregate. The following pre-defined values are available: * `hour`: + Hour of the day * `day`: Day of the year * `week`: Week of the year * `dekad`: Ten day periods, counted per + year with three periods per month (day 1 - 10, 11 - 20 and 21 - end of month). The third dekad of the month + can range from 8 to 11 days. For example, the fourth dekad is Feb, 1 - Feb, 10 each year. * `month`: Month + of the year * `season`: Three month periods of the calendar seasons (December - February, March - May, June + - August, September - November). * `tropical-season`: Six month periods of the tropical seasons (November - + April, May - October). * `year`: Proleptic years * `decade`: Ten year periods ([0-to-9 + decade](https://en.wikipedia.org/wiki/Decade#0-to-9_decade)), from a year ending in a 0 to the next year + ending in a 9. 
* `decade-ad`: Ten year periods ([1-to-0 + decade](https://en.wikipedia.org/wiki/Decade#1-to-0_decade)) better aligned with the Anno Domini (AD) + calendar era, from a year ending in a 1 to the next year ending in a 0. + :param reducer: A reducer to be applied on all values along the specified dimension. A reducer is a single + process such as ``mean()`` or a set of processes, which computes a single value for a list of values, see + the category 'reducer' for such processes. + :param dimension: The name of the temporal dimension for aggregation. All data along the dimension is + passed through the specified reducer. If the dimension is not set or set to `null`, the data cube is + expected to only have one temporal dimension. Fails with a `TooManyDimensions` error if it has more + dimensions. Fails with a `DimensionNotAvailable` error if the specified dimension does not exist. + :param context: Additional data to be passed to the reducer. + + :return: A data cube with the same dimensions. The dimension properties (name, type, labels, reference + system and resolution) remain unchanged, except for the resolution and dimension labels of the given + temporal dimension. The specified temporal dimension has the following dimension labels (`YYYY` = four- + digit year, `MM` = two-digit month, `DD` two-digit day of month): * `hour`: `YYYY-MM-DD-00` - `YYYY-MM- + DD-23` * `day`: `YYYY-001` - `YYYY-365` * `week`: `YYYY-01` - `YYYY-52` * `dekad`: `YYYY-00` - `YYYY-36` * + `month`: `YYYY-01` - `YYYY-12` * `season`: `YYYY-djf` (December - February), `YYYY-mam` (March - May), + `YYYY-jja` (June - August), `YYYY-son` (September - November). * `tropical-season`: `YYYY-ndjfma` (November + - April), `YYYY-mjjaso` (May - October). 
* `year`: `YYYY` * `decade`: `YYY0` * `decade-ad`: `YYY1` + """ + return process('aggregate_temporal_period', data=data, period=period, reducer=reducer, dimension=dimension, context=context) + + +def all(data, ignore_nodata=UNSET): + """ + Are all of the values true? + + :param data: A set of boolean values. + :param ignore_nodata: Indicates whether no-data values are ignored or not and ignores them by default. + + :return: Boolean result of the logical operation. + """ + return process('all', data=data, ignore_nodata=ignore_nodata) + + +def and_(x, y): + """ + Logical AND + + :param x: A boolean value. + :param y: A boolean value. + + :return: Boolean result of the logical AND. + """ + return process('and_', x=x, y=y) + + +def anomaly(data, normals, period): + """ + Computes anomalies + + :param data: A data cube with exactly one temporal dimension and the following dimension labels for the + given period (`YYYY` = four-digit year, `MM` = two-digit month, `DD` two-digit day of month): * `hour`: + `YYYY-MM-DD-00` - `YYYY-MM-DD-23` * `day`: `YYYY-001` - `YYYY-365` * `week`: `YYYY-01` - `YYYY-52` * + `dekad`: `YYYY-00` - `YYYY-36` * `month`: `YYYY-01` - `YYYY-12` * `season`: `YYYY-djf` (December - + February), `YYYY-mam` (March - May), `YYYY-jja` (June - August), `YYYY-son` (September - November). * + `tropical-season`: `YYYY-ndjfma` (November - April), `YYYY-mjjaso` (May - October). * `year`: `YYYY` * + `decade`: `YYY0` * `decade-ad`: `YYY1` * `single-period` / `climatology-period`: Any + ``aggregate_temporal_period()`` can compute such a data cube. + :param normals: A data cube with normals, e.g. daily, monthly or yearly values computed from a process such + as ``climatological_normal()``. 
Must contain exactly one temporal dimension with the following dimension + labels for the given period: * `hour`: `00` - `23` * `day`: `001` - `365` * `week`: `01` - `52` * `dekad`: + `00` - `36` * `month`: `01` - `12` * `season`: `djf` (December - February), `mam` (March - May), `jja` + (June - August), `son` (September - November) * `tropical-season`: `ndjfma` (November - April), `mjjaso` + (May - October) * `year`: Four-digit year numbers * `decade`: Four-digit year numbers, the last digit being + a `0` * `decade-ad`: Four-digit year numbers, the last digit being a `1` * `single-period` / `climatology- + period`: A single dimension label with any name is expected. + :param period: Specifies the time intervals available in the normals data cube. The following options are + available: * `hour`: Hour of the day * `day`: Day of the year * `week`: Week of the year * `dekad`: Ten + day periods, counted per year with three periods per month (day 1 - 10, 11 - 20 and 21 - end of month). The + third dekad of the month can range from 8 to 11 days. For example, the fourth dekad is Feb, 1 - Feb, 10 + each year. * `month`: Month of the year * `season`: Three month periods of the calendar seasons (December - + February, March - May, June - August, September - November). * `tropical-season`: Six month periods of the + tropical seasons (November - April, May - October). * `year`: Proleptic years * `decade`: Ten year periods + ([0-to-9 decade](https://en.wikipedia.org/wiki/Decade#0-to-9_decade)), from a year ending in a 0 to the + next year ending in a 9. * `decade-ad`: Ten year periods ([1-to-0 + decade](https://en.wikipedia.org/wiki/Decade#1-to-0_decade)) better aligned with the Anno Domini (AD) + calendar era, from a year ending in a 1 to the next year ending in a 0. * `single-period` / `climatology- + period`: A single period of arbitrary length + + :return: A data cube with the same dimensions. 
+ The dimension properties (name, type, labels, reference + system and resolution) remain unchanged. + """ + return process('anomaly', data=data, normals=normals, period=period) + + +def any(data, ignore_nodata=UNSET): + """ + Is at least one value true? + + :param data: A set of boolean values. + :param ignore_nodata: Indicates whether no-data values are ignored or not and ignores them by default. + + :return: Boolean result of the logical operation. + """ + return process('any', data=data, ignore_nodata=ignore_nodata) + + +def apply(data, process, context=UNSET): + """ + Apply a process to each pixel + + :param data: A data cube. + :param process: A unary process to be applied on each value, may consist of multiple sub-processes. + :param context: Additional data to be passed to the process. + + :return: A data cube with the newly computed values and the same dimensions. The dimension properties + (name, type, labels, reference system and resolution) remain unchanged. + """ + return process('apply', data=data, process=process, context=context) + + +def apply_dimension(data, process, dimension, target_dimension=UNSET, context=UNSET): + """ + Apply a process to pixels along a dimension + + :param data: A data cube. + :param process: Process to be applied on all pixel values. The specified process needs to accept an array + as parameter and must return an array with at least one element. A process may consist of multiple sub- + processes. + :param dimension: The name of the source dimension to apply the process on. Fails with a + `DimensionNotAvailable` error if the specified dimension does not exist. + :param target_dimension: The name of the target dimension or `null` (the default) to use the source + dimension specified in the parameter `dimension`. By specifying a target dimension, the source dimension + is removed. The target dimension with the specified name and the type `other` (see ``add_dimension()``) is + created, if it doesn't exist yet. 
+ :param context: Additional data to be passed to the process. + + :return: A data cube with the newly computed values. All dimensions stay the same, except for the + dimensions specified in corresponding parameters. There are three cases how the data cube changes: 1. The + source dimension **is** the target dimension: * The (number of) dimensions remain unchanged. * The + source dimension properties name, type and reference system remain unchanged. * The dimension labels and + the resolution are preserved when the number of pixel values in the source dimension is equal to the number + of values computed by the process. The other case is described below. 2. The source dimension **is not** + the target dimension and the latter **exists**: * The number of dimensions decreases by one as the + source dimension is dropped. * The target dimension properties name, type and reference system remain + unchanged. * The resolution changes, the number of dimension labels is equal to the number of values + computed by the process and the dimension labels are incrementing integers starting from zero 3. The source + dimension **is not** the target dimension and the latter **does not exist**: * The number of dimensions + remain unchanged, but the source dimension is replaced with the target dimension. * The target dimension + has the specified name and the type other. The reference system is not changed. * The resolution + changes, the number of dimension labels is equal to the number of values computed by the process and the + dimension labels are incrementing integers starting from zero For all three cases except for the exception + in the first case, the resolution changes, the number of dimension labels is equal to the number of values + computed by the process and the dimension labels are incrementing integers starting from zero. 
+ """ + return process('apply_dimension', data=data, process=process, dimension=dimension, target_dimension=target_dimension, context=context) + + +def apply_kernel(data, kernel, factor=UNSET, border=UNSET, replace_invalid=UNSET): + """ + Apply a spatial convolution with a kernel + + :param data: A data cube. + :param kernel: Kernel as a two-dimensional array of weights. The inner level of the nested array aligns + with the `x` axis and the outer level aligns with the `y` axis. Each level of the kernel must have an + uneven number of elements, otherwise the process throws a `KernelDimensionsUneven` error. + :param factor: A factor that is multiplied to each value after the kernel has been applied. This is + basically a shortcut for explicitly multiplying each value by a factor afterwards, which is often required + for some kernel-based algorithms such as the Gaussian blur. + :param border: Determines how the data is extended when the kernel overlaps with the borders. Defaults to + fill the border with zeroes. The following options are available: * *numeric value* - fill with a user- + defined constant number `n`: `nnnnnn|abcdefgh|nnnnnn` (default, with `n` = 0) * `replicate` - repeat the + value from the pixel at the border: `aaaaaa|abcdefgh|hhhhhh` * `reflect` - mirror/reflect from the border: + `fedcba|abcdefgh|hgfedc` * `reflect_pixel` - mirror/reflect from the center of the pixel at the border: + `gfedcb|abcdefgh|gfedcb` * `wrap` - repeat/wrap the image: `cdefgh|abcdefgh|abcdef` + :param replace_invalid: This parameter specifies the value to replace non-numerical or infinite numerical + values with. By default, those values are replaced with zeroes. + + :return: A data cube with the newly computed values and the same dimensions. The dimension properties + (name, type, labels, reference system and resolution) remain unchanged. 
+ """ + return process('apply_kernel', data=data, kernel=kernel, factor=factor, border=border, replace_invalid=replace_invalid) + + +def apply_neighborhood(data, process, size, overlap=UNSET, context=UNSET): + """ + Apply a process to pixels in a n-dimensional neighbourhood + + :param data: A data cube. + :param process: Process to be applied on all neighbourhoods. + :param size: Neighbourhood sizes along each dimension. This object maps dimension names to either a + physical measure (e.g. 100 m, 10 days) or pixels (e.g. 32 pixels). For dimensions not specified, the + default is to provide all values. Be aware that including all values from overly large dimensions may not + be processed at once. + :param overlap: Overlap of neighbourhoods along each dimension to avoid border effects. For instance a + temporal dimension can add 1 month before and after a neighbourhood. In the spatial dimensions, this is + often a number of pixels. The overlap specified is added before and after, so an overlap of 8 pixels will + add 8 pixels on both sides of the window, so 16 in total. Be aware that large overlaps increase the need + for computational resources and modifying overlapping data in subsequent operations have no effect. + :param context: Additional data to be passed to the process. + + :return: A data cube with the newly computed values and the same dimensions. The dimension properties + (name, type, labels, reference system and resolution) remain unchanged. + """ + return process('apply_neighborhood', data=data, process=process, size=size, overlap=overlap, context=context) + + +def arccos(x): + """ + Inverse cosine + + :param x: A number. + + :return: The computed angle in radians. + """ + return process('arccos', x=x) + + +def arcosh(x): + """ + Inverse hyperbolic cosine + + :param x: A number. + + :return: The computed angle in radians. + """ + return process('arcosh', x=x) + + +def arcsin(x): + """ + Inverse sine + + :param x: A number. 
+ + :return: The computed angle in radians. + """ + return process('arcsin', x=x) + + +def arctan(x): + """ + Inverse tangent + + :param x: A number. + + :return: The computed angle in radians. + """ + return process('arctan', x=x) + + +def arctan2(y, x): + """ + Inverse tangent of two numbers + + :param y: A number to be used as dividend. + :param x: A number to be used as divisor. + + :return: The computed angle in radians. + """ + return process('arctan2', y=y, x=x) + + +def array_apply(data, process, context=UNSET): + """ + Applies a unary process to each array element + + :param data: An array. + :param process: A process to be applied on each value, may consist of multiple sub-processes. The specified + process must be unary meaning that it must work on a single value. + :param context: Additional data to be passed to the process. + + :return: An array with the newly computed values. The number of elements are the same as for the original + array. + """ + return process('array_apply', data=data, process=process, context=context) + + +def array_contains(data, value): + """ + Check whether the array contains a given value + + :param data: List to find the value in. + :param value: Value to find in `data`. + + :return: Returns `true` if the list contains the value, `false` otherwise. + """ + return process('array_contains', data=data, value=value) + + +def array_element(data, index=UNSET, label=UNSET, return_nodata=UNSET): + """ + Get an element from an array + + :param data: An array. + :param index: The zero-based index of the element to retrieve. + :param label: The label of the element to retrieve. + :param return_nodata: By default this process throws an `ArrayElementNotAvailable` exception if the index + or label is invalid. If you want to return `null` instead, set this flag to `true`. + + :return: The value of the requested element. 
+ """ + return process('array_element', data=data, index=index, label=label, return_nodata=return_nodata) + + +def array_filter(data, condition, context=UNSET): + """ + Filter an array based on a condition + + :param data: An array. + :param condition: A condition that is evaluated against each value in the array. Only the array elements + where the condition returns `true` are preserved. + :param context: Additional data to be passed to the condition. + + :return: An array filtered by the specified condition. The number of elements are less than or equal + compared to the original array. + """ + return process('array_filter', data=data, condition=condition, context=context) + + +def array_find(data, value): + """ + Get the index for a value in an array + + :param data: List to find the value in. + :param value: Value to find in `data`. + + :return: Returns the index of the first element with the specified value. If no element was found, `null` + is returned. + """ + return process('array_find', data=data, value=value) + + +def array_labels(data): + """ + Get the labels for an array + + :param data: An array with labels. + + :return: The labels as array. + """ + return process('array_labels', data=data) + + +def arsinh(x): + """ + Inverse hyperbolic sine + + :param x: A number. + + :return: The computed angle in radians. + """ + return process('arsinh', x=x) + + +def artanh(x): + """ + Inverse hyperbolic tangent + + :param x: A number. + + :return: The computed angle in radians. + """ + return process('artanh', x=x) + + +def between(x, min, max, exclude_max=UNSET): + """ + Between comparison + + :param x: The value to check. + :param min: Lower boundary (inclusive) to check against. + :param max: Upper boundary (inclusive) to check against. + :param exclude_max: Exclude the upper boundary `max` if set to `true`. Defaults to `false`. + + :return: `true` if `x` is between the specified bounds, otherwise `false`. 
+ """ + return process('between', x=x, min=min, max=max, exclude_max=exclude_max) + + +def ceil(x): + """ + Round fractions up + + :param x: A number to round up. + + :return: The number rounded up. + """ + return process('ceil', x=x) + + +def climatological_normal(data, period, climatology_period=UNSET): + """ + Computes climatology normals + + :param data: A data cube with exactly one temporal dimension. The data cube must span at least the temporal + interval specified in the parameter `climatology-period`. Seasonal periods may span two consecutive years, + e.g. temporal winter that includes months December, January and February. If the required months before the + actual climate period are available, the season is taken into account. If not available, the first season + is not taken into account and the seasonal mean is based on one year less than the other seasonal normals. + The incomplete season at the end of the last year is never taken into account. + :param period: The time intervals to aggregate the average value for. The following pre-defined frequencies + are supported: * `day`: Day of the year * `month`: Month of the year * `climatology-period`: The period + specified in the `climatology-period`. * `season`: Three month periods of the calendar seasons (December - + February, March - May, June - August, September - November). * `tropical-season`: Six month periods of the + tropical seasons (November - April, May - October). + :param climatology_period: The climatology period as closed temporal interval. The first element of the + array is the first year to be fully included in the temporal interval. The second element is the last year + to be fully included in the temporal interval. The default period is from 1981 until 2010 (both inclusive). + + :return: A data cube with the same dimensions. 
+ The dimension properties (name, type, labels, reference + system and resolution) remain unchanged, except for the resolution and dimension labels of the temporal + dimension. The temporal dimension has the following dimension labels: * `day`: `001` - `365` * `month`: + `01` - `12` * `climatology-period`: `climatology-period` * `season`: `djf` (December - February), `mam` + (March - May), `jja` (June - August), `son` (September - November) * `tropical-season`: `ndjfma` (November + - April), `mjjaso` (May - October) + """ + return process('climatological_normal', data=data, period=period, climatology_period=climatology_period) + + +def clip(x, min, max): + """ + Clip a value between a minimum and a maximum + + :param x: A number. + :param min: Minimum value. If the value is lower than this value, the process will return the value of this + parameter. + :param max: Maximum value. If the value is greater than this value, the process will return the value of + this parameter. + + :return: The value clipped to the specified range. + """ + return process('clip', x=x, min=min, max=max) + + +def constant(x): + """ + Define a constant value + + :param x: The value of the constant. + + :return: The value of the constant. + """ + return process('constant', x=x) + + +def cos(x): + """ + Cosine + + :param x: An angle in radians. + + :return: The computed cosine of `x`. + """ + return process('cos', x=x) + + +def cosh(x): + """ + Hyperbolic cosine + + :param x: An angle in radians. + + :return: The computed hyperbolic cosine of `x`. + """ + return process('cosh', x=x) + + +def count(data, condition=UNSET, context=UNSET): + """ + Count the number of elements + + :param data: An array with elements of any data type. + :param condition: A condition consists of one or more processes, which in the end return a boolean value. + It is evaluated against each element in the array. An element is counted only if the condition returns + `true`. 
Defaults to count valid elements in a list (see ``is_valid()``). Setting this parameter to boolean + `true` counts all elements in the list. + :param context: Additional data to be passed to the condition. + + :return: The counted number of elements. + """ + return process('count', data=data, condition=condition, context=context) + + +def create_raster_cube(): + """ + Create an empty raster data cube + + :return: An empty raster data cube with zero dimensions. + """ + return process('create_raster_cube', ) + + +def cummax(data, ignore_nodata=UNSET): + """ + Cumulative maxima + + :param data: An array of numbers. + :param ignore_nodata: Indicates whether no-data values are ignored or not. Ignores them by default. Setting + this flag to `false` considers no-data values so that `null` is set for all the following elements. + + :return: An array with the computed cumulative maxima. + """ + return process('cummax', data=data, ignore_nodata=ignore_nodata) + + +def cummin(data, ignore_nodata=UNSET): + """ + Cumulative minima + + :param data: An array of numbers. + :param ignore_nodata: Indicates whether no-data values are ignored or not. Ignores them by default. Setting + this flag to `false` considers no-data values so that `null` is set for all the following elements. + + :return: An array with the computed cumulative minima. + """ + return process('cummin', data=data, ignore_nodata=ignore_nodata) + + +def cumproduct(data, ignore_nodata=UNSET): + """ + Cumulative products + + :param data: An array of numbers. + :param ignore_nodata: Indicates whether no-data values are ignored or not. Ignores them by default. Setting + this flag to `false` considers no-data values so that `null` is set for all the following elements. + + :return: An array with the computed cumulative products. + """ + return process('cumproduct', data=data, ignore_nodata=ignore_nodata) + + +def cumsum(data, ignore_nodata=UNSET): + """ + Cumulative sums + + :param data: An array of numbers. 
+ :param ignore_nodata: Indicates whether no-data values are ignored or not. Ignores them by default. Setting + this flag to `false` considers no-data values so that `null` is set for all the following elements. + + :return: An array with the computed cumulative sums. + """ + return process('cumsum', data=data, ignore_nodata=ignore_nodata) + + +def debug(data, code=UNSET, level=UNSET, message=UNSET): + """ + Publish debugging information + + :param data: Data to publish. + :param code: An identifier to help identify the log entry in a bunch of other log entries. + :param level: The severity level of this message, defaults to `info`. Note that the level `error` forces + the computation to be stopped! + :param message: A message to send in addition to the data. + + :return: Returns the data as passed to the `data` parameter. + """ + return process('debug', data=data, code=code, level=level, message=message) + + +def dimension_labels(data, dimension): + """ + Get the dimension labels + + :param data: The data cube. + :param dimension: The name of the dimension to get the labels for. + + :return: The labels as array. + """ + return process('dimension_labels', data=data, dimension=dimension) + + +def divide(x, y): + """ + Division of two numbers + + :param x: The dividend. + :param y: The divisor. + + :return: The computed result. + """ + return process('divide', x=x, y=y) + + +def drop_dimension(data, name): + """ + Remove a dimension + + :param data: The data cube to drop a dimension from. + :param name: Name of the dimension to drop. + + :return: A data cube without the specified dimension. The number of dimensions decreases by one, but the + dimension properties (name, type, labels, reference system and resolution) for all other dimensions remain + unchanged. + """ + return process('drop_dimension', data=data, name=name) + + +def e(): + """ + Euler's number (e) + + :return: The numerical value of Euler's number. 
+ """ + return process('e', ) + + +def eq(x, y, delta=UNSET, case_sensitive=UNSET): + """ + Equal to comparison + + :param x: First operand. + :param y: Second operand. + :param delta: Only applicable for comparing two numbers. If this optional parameter is set to a positive + non-zero number the equality of two numbers is checked against a delta value. This is especially useful to + circumvent problems with floating point inaccuracy in machine-based computation. This option is basically + an alias for the following computation: `lte(abs(minus([x, y]), delta)` + :param case_sensitive: Only applicable for comparing two strings. Case sensitive comparison can be disabled + by setting this parameter to `false`. + + :return: Returns `true` if `x` is equal to `y`, `null` if any operand is `null`, otherwise `false`. + """ + return process('eq', x=x, y=y, delta=delta, case_sensitive=case_sensitive) + + +def exp(p): + """ + Exponentiation to the base e + + :param p: The numerical exponent. + + :return: The computed value for *e* raised to the power of `p`. + """ + return process('exp', p=p) + + +def extrema(data, ignore_nodata=UNSET): + """ + Minimum and maximum values + + :param data: An array of numbers. + :param ignore_nodata: Indicates whether no-data values are ignored or not. Ignores them by default. Setting + this flag to `false` considers no-data values so that an array with two `null` values is returned if any + value is such a value. + + :return: An array containing the minimum and maximum values for the specified numbers. The first element is + the minimum, the second element is the maximum. If the input array is empty both elements are set to + `null`. + """ + return process('extrema', data=data, ignore_nodata=ignore_nodata) + + +def filter_bands(data, bands=UNSET, wavelengths=UNSET): + """ + Filter the bands by name + + :param data: A data cube with bands. + :param bands: A list of band names. 
Either the unique band name (metadata field `name` in bands) or one of + the common band names (metadata field `common_name` in bands). If unique band name and common name + conflict, the unique band name has higher priority. The order of the specified array defines the order of + the bands in the data cube. If multiple bands match a common name, all matched bands are included in the + original order. + :param wavelengths: A list of sub-lists with each sub-list consisting of two elements. The first element is + the minimum wavelength and the second element is the maximum wavelength. Wavelengths are specified in + micrometres (μm). The order of the specified array defines the order of the bands in the data cube. If + multiple bands match the wavelengths, all matched bands are included in the original order. + + :return: A data cube limited to a subset of its original bands. The dimensions and dimension properties + (name, type, labels, reference system and resolution) remain unchanged, except that the dimension of type + `bands` has less (or the same) dimension labels. + """ + return process('filter_bands', data=data, bands=bands, wavelengths=wavelengths) + + +def filter_bbox(data, extent): + """ + Spatial filter using a bounding box + + :param data: A data cube. + :param extent: A bounding box, which may include a vertical axis (see `base` and `height`). + + :return: A data cube restricted to the bounding box. The dimensions and dimension properties (name, type, + labels, reference system and resolution) remain unchanged, except that the spatial dimensions have less (or + the same) dimension labels. + """ + return process('filter_bbox', data=data, extent=extent) + + +def filter_labels(data, condition, dimension, context=UNSET): + """ + Filter dimension labels based on a condition + + :param data: A data cube. + :param condition: A condition that is evaluated against each dimension label in the specified dimension. 
A + dimension label and the corresponding data is preserved for the given dimension, if the condition returns + `true`. + :param dimension: The name of the dimension to filter on. Fails with a `DimensionNotAvailable` error if the + specified dimension does not exist. + :param context: Additional data to be passed to the condition. + + :return: A data cube with the same dimensions. The dimension properties (name, type, labels, reference + system and resolution) remain unchanged, except that the given dimension has less (or the same) dimension + labels. + """ + return process('filter_labels', data=data, condition=condition, dimension=dimension, context=context) + + +def filter_spatial(data, geometries): + """ + Spatial filter using geometries + + :param data: A data cube. + :param geometries: One or more geometries used for filtering, specified as GeoJSON. + + :return: A data cube restricted to the specified geometries. The dimensions and dimension properties (name, + type, labels, reference system and resolution) remain unchanged, except that the spatial dimensions have + less (or the same) dimension labels. + """ + return process('filter_spatial', data=data, geometries=geometries) + + +def filter_temporal(data, extent, dimension=UNSET): + """ + Temporal filter for a temporal intervals + + :param data: A data cube. + :param extent: Left-closed temporal interval, i.e. an array with exactly two elements: 1. The first + element is the start of the temporal interval. The specified instance in time is **included** in the + interval. 2. The second element is the end of the temporal interval. The specified instance in time is + **excluded** from the interval. The specified temporal strings follow [RFC + 3339](https://tools.ietf.org/html/rfc3339). Also supports open intervals by setting one of the boundaries + to `null`, but never both. + :param dimension: The name of the temporal dimension to filter on. 
If the dimension is not set or is set to + `null`, the filter applies to all temporal dimensions. Fails with a `DimensionNotAvailable` error if the + specified dimension does not exist. + + :return: A data cube restricted to the specified temporal extent. The dimensions and dimension properties + (name, type, labels, reference system and resolution) remain unchanged, except that the given temporal + dimension(s) have less (or the same) dimension labels. + """ + return process('filter_temporal', data=data, extent=extent, dimension=dimension) + + +def first(data, ignore_nodata=UNSET): + """ + First element + + :param data: An array with elements of any data type. + :param ignore_nodata: Indicates whether no-data values are ignored or not. Ignores them by default. Setting + this flag to `false` considers no-data values so that `null` is returned if the first value is such a + value. + + :return: The first element of the input array. + """ + return process('first', data=data, ignore_nodata=ignore_nodata) + + +def floor(x): + """ + Round fractions down + + :param x: A number to round down. + + :return: The number rounded down. + """ + return process('floor', x=x) + + +def gt(x, y): + """ + Greater than comparison + + :param x: First operand. + :param y: Second operand. + + :return: `true` if `x` is strictly greater than `y` or `null` if any operand is `null`, otherwise `false`. + """ + return process('gt', x=x, y=y) + + +def gte(x, y): + """ + Greater than or equal to comparison + + :param x: First operand. + :param y: Second operand. + + :return: `true` if `x` is greater than or equal to `y`, `null` if any operand is `null`, otherwise `false`. + """ + return process('gte', x=x, y=y) + + +def if_(value, accept, reject=UNSET): + """ + If-Then-Else conditional + + :param value: A boolean value. + :param accept: A value that is returned if the boolean value is `true`. + :param reject: A value that is returned if the boolean value is **not** `true`. Defaults to `null`. 
+ + :return: Either the `accept` or `reject` argument depending on the given boolean value. + """ + return process('if_', value=value, accept=accept, reject=reject) + + +def int(x): + """ + Integer part of a number + + :param x: A number. + + :return: Integer part of the number. + """ + return process('int', x=x) + + +def is_nan(x): + """ + Value is not a number + + :param x: The data to check. + + :return: `true` if the data is not a number, otherwise `false` + """ + return process('is_nan', x=x) + + +def is_nodata(x): + """ + Value is not a no-data value + + :param x: The data to check. + + :return: `true` if the data is a no-data value, otherwise `false` + """ + return process('is_nodata', x=x) + + +def is_valid(x): + """ + Value is valid data + + :param x: The data to check. + + :return: `true` if the data is valid, otherwise `false`. + """ + return process('is_valid', x=x) + + +def last(data, ignore_nodata=UNSET): + """ + Last element + + :param data: An array with elements of any data type. + :param ignore_nodata: Indicates whether no-data values are ignored or not. Ignores them by default. Setting + this flag to `false` considers no-data values so that `null` is returned if the last value is such a value. + + :return: The last element of the input array. + """ + return process('last', data=data, ignore_nodata=ignore_nodata) + + +def linear_scale_range(x, inputMin, inputMax, outputMin=UNSET, outputMax=UNSET): + """ + Linear transformation between two ranges + + :param x: A number to transform. The number gets clipped to the bounds specified in `inputMin` and + `inputMax`. + :param inputMin: Minimum value the input can obtain. + :param inputMax: Maximum value the input can obtain. + :param outputMin: Minimum value of the desired output range. + :param outputMax: Maximum value of the desired output range. + + :return: The transformed number. 
+ """ + return process('linear_scale_range', x=x, inputMin=inputMin, inputMax=inputMax, outputMin=outputMin, outputMax=outputMax) + + +def ln(x): + """ + Natural logarithm + + :param x: A number to compute the natural logarithm for. + + :return: The computed natural logarithm. + """ + return process('ln', x=x) + + +def load_collection(id, spatial_extent, temporal_extent, bands=UNSET, properties=UNSET): + """ + Load a collection + + :param id: The collection id. + :param spatial_extent: Limits the data to load from the collection to the specified bounding box or + polygons. The process puts a pixel into the data cube if the point at the pixel center intersects with the + bounding box or any of the polygons (as defined in the Simple Features standard by the OGC). The GeoJSON + can be one of the following GeoJSON types: * A `Polygon` geometry, * a `GeometryCollection` containing + Polygons, * a `Feature` with a `Polygon` geometry or * a `FeatureCollection` containing `Feature`s with a + `Polygon` geometry. Set this parameter to `null` to set no limit for the spatial extent. Be careful with + this when loading large datasets! + :param temporal_extent: Limits the data to load from the collection to the specified left-closed temporal + interval. Applies to all temporal dimensions. The interval has to be specified as an array with exactly two + elements: 1. The first element is the start of the temporal interval. The specified instance in time is + **included** in the interval. 2. The second element is the end of the temporal interval. The specified + instance in time is **excluded** from the interval. The specified temporal strings follow [RFC + 3339](https://tools.ietf.org/html/rfc3339). Also supports open intervals by setting one of the boundaries + to `null`, but never both. Set this parameter to `null` to set no limit for the spatial extent. Be careful + with this when loading large datasets! 
+ :param bands: Only adds the specified bands into the data cube so that bands that don't match the list of + band names are not available. Applies to all dimensions of type `bands`. Either the unique band name + (metadata field `name` in bands) or one of the common band names (metadata field `common_name` in bands) + can be specified. If unique band name and common name conflict, the unique band name has higher priority. + The order of the specified array defines the order of the bands in the data cube. If multiple bands match a + common name, all matched bands are included in the original order. + :param properties: Limits the data by metadata properties to include only data in the data cube which all + given conditions return `true` for (AND operation). Specify key-value-pairs with the key being the name of + the metadata property, which can be retrieved with the openEO Data Discovery for Collections. The value + must be a condition (user-defined process) to be evaluated against the collection metadata, see the example. + + :return: A data cube for further processing. The dimensions and dimension properties (name, type, labels, + reference system and resolution) correspond to the collection's metadata, but the dimension labels are + restricted as specified in the parameters. + """ + return process('load_collection', id=id, spatial_extent=spatial_extent, temporal_extent=temporal_extent, bands=bands, properties=properties) + + + def load_result(id): + """ + Load batch job results + + :param id: The id of a batch job with results. + + :return: A data cube for further processing. + """ + return process('load_result', id=id) + + + def load_uploaded_files(paths, format, options=UNSET): + """ + Load files from the user workspace + + :param paths: The files to read. Folders can't be specified, instead specify all files. An error is thrown + if a file can't be read. + :param format: The file format to read from. 
It must be one of the values that the server reports as + supported input file formats, which usually correspond to the short GDAL/OGR codes. If the format is not + suitable for loading the data, a `FormatUnsuitable` exception will be thrown. This parameter is *case + insensitive*. + :param options: The file format parameters to be used to read the files. Must correspond to the parameters + that the server reports as supported parameters for the chosen `format`. The parameter names and valid + values usually correspond to the GDAL/OGR format options. + + :return: A data cube for further processing. + """ + return process('load_uploaded_files', paths=paths, format=format, options=options) + + +def log(x, base): + """ + Logarithm to a base + + :param x: A number to compute the logarithm for. + :param base: The numerical base. + + :return: The computed logarithm. + """ + return process('log', x=x, base=base) + + +def lt(x, y): + """ + Less than comparison + + :param x: First operand. + :param y: Second operand. + + :return: `true` if `x` is strictly less than `y`, `null` if any operand is `null`, otherwise `false`. + """ + return process('lt', x=x, y=y) + + +def lte(x, y): + """ + Less than or equal to comparison + + :param x: First operand. + :param y: Second operand. + + :return: `true` if `x` is less than or equal to `y`, `null` if any operand is `null`, otherwise `false`. + """ + return process('lte', x=x, y=y) + + +def mask(data, mask, replacement=UNSET): + """ + Apply a raster mask + + :param data: A raster data cube. + :param mask: A mask as raster data cube. Every pixel in `data` must have a corresponding element in `mask`. + :param replacement: The value used to replace masked values with. + + :return: A masked raster data cube with the same dimensions. The dimension properties (name, type, labels, + reference system and resolution) remain unchanged. 
+ """ + return process('mask', data=data, mask=mask, replacement=replacement) + + +def mask_polygon(data, mask, replacement=UNSET, inside=UNSET): + """ + Apply a polygon mask + + :param data: A raster data cube. + :param mask: A GeoJSON object containing a polygon. The provided feature types can be one of the following: + * A `Polygon` geometry, * a `GeometryCollection` containing Polygons, * a `Feature` with a `Polygon` + geometry or * a `FeatureCollection` containing `Feature`s with a `Polygon` geometry. + :param replacement: The value used to replace masked values with. + :param inside: If set to `true` all pixels for which the point at the pixel center **does** intersect with + any polygon are replaced. + + :return: A masked raster data cube with the same dimensions. The dimension properties (name, type, labels, + reference system and resolution) remain unchanged. + """ + return process('mask_polygon', data=data, mask=mask, replacement=replacement, inside=inside) + + +def max(data, ignore_nodata=UNSET): + """ + Maximum value + + :param data: An array of numbers. + :param ignore_nodata: Indicates whether no-data values are ignored or not. Ignores them by default. Setting + this flag to `false` considers no-data values so that `null` is returned if any value is such a value. + + :return: The maximum value. + """ + return process('max', data=data, ignore_nodata=ignore_nodata) + + +def mean(data, ignore_nodata=UNSET): + """ + Arithmetic mean (average) + + :param data: An array of numbers. + :param ignore_nodata: Indicates whether no-data values are ignored or not. Ignores them by default. Setting + this flag to `false` considers no-data values so that `null` is returned if any value is such a value. + + :return: The computed arithmetic mean. + """ + return process('mean', data=data, ignore_nodata=ignore_nodata) + + +def median(data, ignore_nodata=UNSET): + """ + Statistical median + + :param data: An array of numbers. 
+ :param ignore_nodata: Indicates whether no-data values are ignored or not. Ignores them by default. Setting + this flag to `false` considers no-data values so that `null` is returned if any value is such a value. + + :return: The computed statistical median. + """ + return process('median', data=data, ignore_nodata=ignore_nodata) + + +def merge_cubes(cube1, cube2, overlap_resolver=UNSET, context=UNSET): + """ + Merging two data cubes + + :param cube1: The first data cube. + :param cube2: The second data cube. + :param overlap_resolver: A reduction operator that resolves the conflict if the data overlaps. The reducer + must return a value of the same data type as the input values are. The reduction operator may be a single + process such as ``multiply()`` or consist of multiple sub-processes. `null` (the default) can be specified + if no overlap resolver is required. + :param context: Additional data to be passed to the overlap resolver. + + :return: The merged data cube. See the process description for details regarding the dimensions and + dimension properties (name, type, labels, reference system and resolution). + """ + return process('merge_cubes', cube1=cube1, cube2=cube2, overlap_resolver=overlap_resolver, context=context) + + +def min(data, ignore_nodata=UNSET): + """ + Minimum value + + :param data: An array of numbers. + :param ignore_nodata: Indicates whether no-data values are ignored or not. Ignores them by default. Setting + this flag to `false` considers no-data values so that `null` is returned if any value is such a value. + + :return: The minimum value. + """ + return process('min', data=data, ignore_nodata=ignore_nodata) + + +def mod(x, y): + """ + Modulo + + :param x: A number to be used as dividend. + :param y: A number to be used as divisor. + + :return: The remainder after division. + """ + return process('mod', x=x, y=y) + + +def multiply(x, y): + """ + Multiplication of two numbers + + :param x: The multiplier. 
+ :param y: The multiplicand. + + :return: The computed product of the two numbers. + """ + return process('multiply', x=x, y=y) + + +def ndvi(data, nir=UNSET, red=UNSET, target_band=UNSET): + """ + Normalized Difference Vegetation Index + + :param data: A raster data cube with two bands that have the common names `red` and `nir` assigned. + :param nir: The name of the NIR band. Defaults to the band that has the common name `nir` assigned. Either + the unique band name (metadata field `name` in bands) or one of the common band names (metadata field + `common_name` in bands) can be specified. If unique band name and common name conflict, the unique band + name has higher priority. + :param red: The name of the red band. Defaults to the band that has the common name `red` assigned. Either + the unique band name (metadata field `name` in bands) or one of the common band names (metadata field + `common_name` in bands) can be specified. If unique band name and common name conflict, the unique band + name has higher priority. + :param target_band: By default, the dimension of type `bands` is dropped. To keep the dimension specify a + new band name in this parameter so that a new dimension label with the specified name will be added for the + computed values. + + :return: A raster data cube containing the computed NDVI values. The structure of the data cube differs + depending on the value passed to `target_band`: * `target_band` is `null`: The data cube does not contain + the dimension of type `bands` any more, the number of dimensions decreases by one. The dimension properties + (name, type, labels, reference system and resolution) for all other dimensions remain unchanged. * + `target_band` is a string: The data cube keeps the same dimensions. The dimension properties remain + unchanged, but the number of dimension labels for the dimension of type `bands` increases by one. The + additional label is named as specified in `target_band`. 
+ """ + return process('ndvi', data=data, nir=nir, red=red, target_band=target_band) + + +def neq(x, y, delta=UNSET, case_sensitive=UNSET): + """ + Not equal to comparison + + :param x: First operand. + :param y: Second operand. + :param delta: Only applicable for comparing two numbers. If this optional parameter is set to a positive + non-zero number the non-equality of two numbers is checked against a delta value. This is especially useful + to circumvent problems with floating point inaccuracy in machine-based computation. This option is + basically an alias for the following computation: `gt(abs(minus([x, y]), delta)` + :param case_sensitive: Only applicable for comparing two strings. Case sensitive comparison can be disabled + by setting this parameter to `false`. + + :return: Returns `true` if `x` is *not* equal to `y`, `null` if any operand is `null`, otherwise `false`. + """ + return process('neq', x=x, y=y, delta=delta, case_sensitive=case_sensitive) + + +def normalized_difference(x, y): + """ + Normalized difference + + :param x: The value for the first band. + :param y: The value for the second band. + + :return: The computed normalized difference. + """ + return process('normalized_difference', x=x, y=y) + + +def not_(x): + """ + Inverting a boolean + + :param x: Boolean value to invert. + + :return: Inverted boolean value. + """ + return process('not_', x=x) + + +def or_(x, y): + """ + Logical OR + + :param x: A boolean value. + :param y: A boolean value. + + :return: Boolean result of the logical OR. + """ + return process('or_', x=x, y=y) + + +def order(data, asc=UNSET, nodata=UNSET): + """ + Create a permutation + + :param data: An array to compute the order for. + :param asc: The default sort order is ascending, with smallest values first. To sort in reverse + (descending) order, set this parameter to `false`. + :param nodata: Controls the handling of no-data values (`null`). By default they are removed. 
If `true`, + missing values in the data are put last; if `false`, they are put first. + + :return: The computed permutation. + """ + return process('order', data=data, asc=asc, nodata=nodata) + + +def pi(): + """ + Pi (π) + + :return: The numerical value of Pi. + """ + return process('pi', ) + + +def power(base, p): + """ + Exponentiation + + :param base: The numerical base. + :param p: The numerical exponent. + + :return: The computed value for `base` raised to the power of `p`. + """ + return process('power', base=base, p=p) + + +def product(data, ignore_nodata=UNSET): + """ + Compute the product by multiplying numbers + + :param data: An array of numbers. + :param ignore_nodata: Indicates whether no-data values are ignored or not. Ignores them by default. Setting + this flag to `false` considers no-data values so that `null` is returned if any value is such a value. + + :return: The computed product of the sequence of numbers. + """ + return process('product', data=data, ignore_nodata=ignore_nodata) + + +def quantiles(data, probabilities=UNSET, q=UNSET, ignore_nodata=UNSET): + """ + Quantiles + + :param data: An array of numbers. + :param probabilities: A list of probabilities to calculate quantiles for. The probabilities must be between + 0 and 1. + :param q: A number of intervals to calculate quantiles for. Calculates q-quantiles with (nearly) equal- + sized intervals. + :param ignore_nodata: Indicates whether no-data values are ignored or not. Ignores them by default. Setting + this flag to `false` considers no-data values so that an array with `null` values is returned if any + element is such a value. + + :return: An array with the computed quantiles. The list has either * as many elements as the given list of + `probabilities` had or * *`q`-1* elements. If the input array is empty the resulting array is filled with + as many `null` values as required according to the list above. For an example, see the 'Empty array + example'. 
+ """ + return process('quantiles', data=data, probabilities=probabilities, q=q, ignore_nodata=ignore_nodata) + + +def rearrange(data, order): + """ + Rearrange an array based on a permutation + + :param data: The array to rearrange. + :param order: The permutation used for rearranging. + + :return: The rearranged array. + """ + return process('rearrange', data=data, order=order) + + +def reduce_dimension(data, reducer, dimension, context=UNSET): + """ + Reduce dimensions + + :param data: A data cube. + :param reducer: A reducer to apply on the specified dimension. A reducer is a single process such as + ``mean()`` or a set of processes, which computes a single value for a list of values, see the category + 'reducer' for such processes. + :param dimension: The name of the dimension over which to reduce. Fails with a `DimensionNotAvailable` + error if the specified dimension does not exist. + :param context: Additional data to be passed to the reducer. + + :return: A data cube with the newly computed values. It is missing the given dimension, the number of + dimensions decreases by one. The dimension properties (name, type, labels, reference system and resolution) + for all other dimensions remain unchanged. + """ + return process('reduce_dimension', data=data, reducer=reducer, dimension=dimension, context=context) + + +def reduce_dimension_binary(data, reducer, dimension, context=UNSET): + """ + Reduce dimensions using binary reduction + + :param data: A data cube. + :param reducer: A reduction operator to be applied consecutively on pairs of values. It must be both + associative and commutative as the execution may be executed in parallel and therefore the order of + execution is arbitrary. The reduction operator may be a single process such as ``multiply()`` or consist of + multiple sub-processes. + :param dimension: The name of the dimension over which to reduce. Fails with a `DimensionNotAvailable` + error if the specified dimension does not exist. 
+ :param context: Additional data to be passed to the reducer. + + :return: A data cube with the newly computed values. It is missing the given dimension, the number of + dimensions decreases by one. The dimension properties (name, type, labels, reference system and resolution) + for all other dimensions remain unchanged. + """ + return process('reduce_dimension_binary', data=data, reducer=reducer, dimension=dimension, context=context) + + + def rename_dimension(data, source, target): + """ + Rename a dimension + + :param data: The data cube. + :param source: The current name of the dimension. Fails with a `DimensionNotAvailable` error if the + specified dimension does not exist. + :param target: A new name for the dimension. Fails with a `DimensionExists` error if a dimension with the + specified name exists. + + :return: A data cube with the same dimensions, but the name of one of the dimensions changes. The old name + can not be referred to any longer. The dimension properties (name, type, labels, reference system and + resolution) remain unchanged. + """ + return process('rename_dimension', data=data, source=source, target=target) + + + def rename_labels(data, dimension, target, source=UNSET): + """ + Rename dimension labels + + :param data: The data cube. + :param dimension: The name of the dimension to rename the labels for. + :param target: The new names for the labels. The dimension labels in the data cube are expected to be + enumerated, if the parameter `target` is not specified. If a target dimension label already exists in the + data cube, a `LabelExists` error is thrown. + :param source: The names of the labels as they are currently in the data cube. The array defines an + unsorted and potentially incomplete list of labels that should be renamed to the names available in the + corresponding array elements in the parameter `target`. If one of the source dimension labels doesn't + exist, a `LabelNotAvailable` error is thrown. 
By default, the array is empty so that the dimension labels + in the data cube are expected to be enumerated. + + :return: The data cube with the same dimensions. The dimension properties (name, type, labels, reference + system and resolution) remain unchanged, except that for the given dimension the labels change. The old + labels can not be referred to any longer. The number of labels remains the same. + """ + return process('rename_labels', data=data, dimension=dimension, target=target, source=source) + + +def resample_cube_spatial(data, target, method=UNSET): + """ + Resample the spatial dimensions to match a target data cube + + :param data: A data cube. + :param target: A data cube that describes the spatial target resolution. + :param method: Resampling method. Methods are inspired by GDAL, see + [gdalwarp](https://www.gdal.org/gdalwarp.html) for more information. + + :return: A data cube with the same dimensions. The dimension properties (name, type, labels, reference + system and resolution) remain unchanged, except for the resolution and dimension labels of the spatial + dimensions. + """ + return process('resample_cube_spatial', data=data, target=target, method=method) + + +def resample_cube_temporal(data, target, method, dimension=UNSET, context=UNSET): + """ + Resample a temporal dimension to match a target data cube + + :param data: A data cube. + :param target: A data cube that describes the temporal target resolution. + :param method: A resampling method to be applied, could be a reducer for downsampling or other methods for + upsampling. A reducer is a single process such as ``mean()`` or a set of processes, which computes a single + value for a list of values, see the category 'reducer' for such processes. + :param dimension: The name of the temporal dimension to resample, which must exist with this name in both + data cubes. If the dimension is not set or is set to `null`, the data cube is expected to only have one + temporal dimension. 
Fails with a `TooManyDimensions` error if it has more dimensions. Fails with a + `DimensionNotAvailable` error if the specified dimension does not exist. + :param context: Additional data to be passed to the process specified for the parameter `method`. + + :return: A raster data cube with the same dimensions and the same dimension properties (name, type, labels, + reference system and resolution) for all non-temporal dimensions. For the temporal dimension the name and + type remain unchanged, but the reference system changes and the labels and resolution may change. + """ + return process('resample_cube_temporal', data=data, target=target, method=method, dimension=dimension, context=context) + + +def resample_spatial(data, resolution=UNSET, projection=UNSET, method=UNSET, align=UNSET): + """ + Resample and warp the spatial dimensions + + :param data: A raster data cube. + :param resolution: Resamples the data cube to the target resolution, which can be specified either as + separate values for x and y or as a single value for both axes. Specified in the units of the target + projection. Doesn't change the resolution by default (`0`). + :param projection: Warps the data cube to the target projection, specified as as [EPSG + code](http://www.epsg-registry.org/), [WKT2 (ISO 19162) + string](http://docs.opengeospatial.org/is/18-010r7/18-010r7.html), [PROJ definition + (deprecated)](https://proj.org/usage/quickstart.html). By default (`null`), the projection is not changed. + :param method: Resampling method. Methods are inspired by GDAL, see + [gdalwarp](https://www.gdal.org/gdalwarp.html) for more information. + :param align: Specifies to which corner of the spatial extent the new resampled data is aligned to. + + :return: A raster data cube with values warped onto the new projection. It has the same dimensions and the + same dimension properties (name, type, labels, reference system and resolution) for all non-spatial or + vertical spatial dimensions. 
For the horizontal spatial dimensions the name and type remain unchanged, but + reference system, labels and resolution may change depending on the given parameters. + """ + return process('resample_spatial', data=data, resolution=resolution, projection=projection, method=method, align=align) + + +def round(x, p=UNSET): + """ + Round to a specified precision + + :param x: A number to round. + :param p: A positive number specifies the number of digits after the decimal point to round to. A negative + number means rounding to a power of ten, so for example *-2* rounds to the nearest hundred. Defaults to + *0*. + + :return: The rounded number. + """ + return process('round', x=x, p=p) + + +def run_udf(data, udf, runtime, version=UNSET, context=UNSET): + """ + Run an UDF + + :param data: The data to be passed to the UDF as array or raster data cube. + :param udf: Either source code, an absolute URL or a path to an UDF script. + :param runtime: An UDF runtime identifier available at the back-end. + :param version: An UDF runtime version. If set to `null`, the default runtime version specified for each + runtime is used. + :param context: Additional data such as configuration options that should be passed to the UDF. + + :return: The data processed by the UDF. * Returns a raster data cube, if a raster data cube is passed for + `data`. Details on the dimensions and dimension properties (name, type, labels, reference system and + resolution) depend on the UDF. * If an array is passed for `data`, the returned value can be of any data + type, but is exactly what the UDF returns. + """ + return process('run_udf', data=data, udf=udf, runtime=runtime, version=version, context=context) + + +def run_udf_externally(data, url, context=UNSET): + """ + Run an externally hosted UDF container + + :param data: The data to be passed to the UDF as array or raster data cube. + :param url: URL to a remote UDF service. 
+ :param context: Additional data such as configuration options that should be passed to the UDF. + + :return: The data processed by the UDF service. * Returns a raster data cube, if a raster data cube is + passed for `data`. Details on the dimensions and dimension properties (name, type, labels, reference system + and resolution) depend on the UDF. * If an array is passed for `data`, the returned value can be of any + data type, but is exactly what the UDF returns. + """ + return process('run_udf_externally', data=data, url=url, context=context) + + +def save_result(data, format, options=UNSET): + """ + Save processed data to storage + + :param data: The data to save. + :param format: The file format to save to. It must be one of the values that the server reports as + supported output file formats, which usually correspond to the short GDAL/OGR codes. If the format is not + suitable for storing the underlying data structure, a `FormatUnsuitable` exception will be thrown. This + parameter is *case insensitive*. + :param options: The file format parameters to be used to create the file(s). Must correspond to the + parameters that the server reports as supported parameters for the chosen `format`. The parameter names and + valid values usually correspond to the GDAL/OGR format options. + + :return: `false` if saving failed, `true` otherwise. + """ + return process('save_result', data=data, format=format, options=options) + + +def sd(data, ignore_nodata=UNSET): + """ + Standard deviation + + :param data: An array of numbers. + :param ignore_nodata: Indicates whether no-data values are ignored or not. Ignores them by default. Setting + this flag to `false` considers no-data values so that `null` is returned if any value is such a value. + + :return: The computed sample standard deviation. + """ + return process('sd', data=data, ignore_nodata=ignore_nodata) + + +def sgn(x): + """ + Signum + + :param x: A number. + + :return: The computed signum value of `x`. 
+ """ + return process('sgn', x=x) + + +def sin(x): + """ + Sine + + :param x: An angle in radians. + + :return: The computed sine of `x`. + """ + return process('sin', x=x) + + +def sinh(x): + """ + Hyperbolic sine + + :param x: An angle in radians. + + :return: The computed hyperbolic sine of `x`. + """ + return process('sinh', x=x) + + +def sort(data, asc=UNSET, nodata=UNSET): + """ + Sort data + + :param data: An array with data to sort. + :param asc: The default sort order is ascending, with smallest values first. To sort in reverse + (descending) order, set this parameter to `false`. + :param nodata: Controls the handling of no-data values (`null`). By default they are removed. If `true`, + missing values in the data are put last; if `false`, they are put first. + + :return: The sorted array. + """ + return process('sort', data=data, asc=asc, nodata=nodata) + + +def sqrt(x): + """ + Square root + + :param x: A number. + + :return: The computed square root. + """ + return process('sqrt', x=x) + + +def subtract(x, y): + """ + Subtraction of two numbers + + :param x: The minuend. + :param y: The subtrahend. + + :return: The computed result. + """ + return process('subtract', x=x, y=y) + + +def sum(data, ignore_nodata=UNSET): + """ + Compute the sum by adding up numbers + + :param data: An array of numbers. + :param ignore_nodata: Indicates whether no-data values are ignored or not. Ignores them by default. Setting + this flag to `false` considers no-data values so that `null` is returned if any value is such a value. + + :return: The computed sum of the sequence of numbers. + """ + return process('sum', data=data, ignore_nodata=ignore_nodata) + + +def tan(x): + """ + Tangent + + :param x: An angle in radians. + + :return: The computed tangent of `x`. + """ + return process('tan', x=x) + + +def tanh(x): + """ + Hyperbolic tangent + + :param x: An angle in radians. + + :return: The computed hyperbolic tangent of `x`. 
+ """ + return process('tanh', x=x) + + +def text_begins(data, pattern, case_sensitive=UNSET): + """ + Text begins with another text + + :param data: Text in which to find something at the beginning. + :param pattern: Text to find at the beginning of `data`. Regular expressions are not supported. + :param case_sensitive: Case sensitive comparison can be disabled by setting this parameter to `false`. + + :return: `true` if `data` begins with `pattern`, false` otherwise. + """ + return process('text_begins', data=data, pattern=pattern, case_sensitive=case_sensitive) + + +def text_contains(data, pattern, case_sensitive=UNSET): + """ + Text contains another text + + :param data: Text in which to find something in. + :param pattern: Text to find in `data`. Regular expressions are not supported. + :param case_sensitive: Case sensitive comparison can be disabled by setting this parameter to `false`. + + :return: `true` if `data` contains the `pattern`, false` otherwise. + """ + return process('text_contains', data=data, pattern=pattern, case_sensitive=case_sensitive) + + +def text_ends(data, pattern, case_sensitive=UNSET): + """ + Text ends with another text + + :param data: Text in which to find something at the end. + :param pattern: Text to find at the end of `data`. Regular expressions are not supported. + :param case_sensitive: Case sensitive comparison can be disabled by setting this parameter to `false`. + + :return: `true` if `data` ends with `pattern`, false` otherwise. + """ + return process('text_ends', data=data, pattern=pattern, case_sensitive=case_sensitive) + + +def text_merge(data, separator=UNSET): + """ + Concatenate elements to a string + + :param data: A set of elements. Numbers, boolean values and null values get converted to their (lower case) + string representation. For example: `1` (integer), `-1.5` (number), `true` / `false` (boolean values) + :param separator: A separator to put between each of the individual texts. Defaults to an empty string. 
+ + :return: Returns a string containing a string representation of all the array elements in the same order, + with the separator between each element. + """ + return process('text_merge', data=data, separator=separator) + + +def trim_cube(data): + """ + Remove dimension labels with no-data values + + :param data: A raster data cube to trim. + + :return: A trimmed raster data cube with the same dimensions. The dimension properties name, type, + reference system and resolution remain unchanged. The number of dimension labels may decrease. + """ + return process('trim_cube', data=data) + + +def variance(data, ignore_nodata=UNSET): + """ + Variance + + :param data: An array of numbers. + :param ignore_nodata: Indicates whether no-data values are ignored or not. Ignores them by default. Setting + this flag to `false` considers no-data values so that `null` is returned if any value is such a value. + + :return: The computed sample variance. + """ + return process('variance', data=data, ignore_nodata=ignore_nodata) + + +def xor(x, y): + """ + Logical XOR (exclusive or) + + :param x: A boolean value. + :param y: A boolean value. + + :return: Boolean result of the logical XOR. 
+ """ + return process('xor', x=x, y=y) + + diff --git a/openeo/rest/datacube.py b/openeo/rest/datacube.py index 622cb565c..d13a17f4f 100644 --- a/openeo/rest/datacube.py +++ b/openeo/rest/datacube.py @@ -17,13 +17,13 @@ import shapely.geometry import shapely.geometry.base from deprecated import deprecated -from openeo.rest.processbuilder import ProcessBuilder +from openeo.processes.processes import ProcessBuilder from shapely.geometry import Polygon, MultiPolygon, mapping import openeo from openeo.imagecollection import ImageCollection, CollectionMetadata from openeo.internal.graph_building import PGNode, ReduceNode -from openeo.rest import BandMathException, OperatorException +from openeo.rest import BandMathException, OperatorException, OpenEoClientException from openeo.rest.job import RESTJob from openeo.rest.udp import RESTUserDefinedProcess from openeo.util import get_temporal_extent, dict_no_none @@ -540,11 +540,10 @@ def apply_dimension( arguments={"data": {"from_parameter": "data"}}, ) elif isinstance(process, typing.Callable): - builder = ProcessBuilder() - callback_graph = process(builder) - callback_process_node = callback_graph.pgnode + builder = process(ProcessBuilder.from_parameter("data")) + callback_process_node = builder.pgnode else: - callback_process_node = None + raise OpenEoClientException("No code or process given") arguments = { "data": self._pg, "process": PGNode.to_process_graph_argument(callback_process_node), @@ -571,9 +570,7 @@ def reduce_dimension(self, dimension: str, reducer: Union[typing.Callable, str], # Assume given reducer is a simple predefined reduce process_id reducer = PGNode(process_id=reducer, arguments={"data": {"from_parameter": "data"}}) elif isinstance(reducer, typing.Callable): - builder = ProcessBuilder() - callback_graph = reducer(builder) - reducer = callback_graph.pgnode + reducer = reducer(ProcessBuilder.from_parameter("data")).pgnode return self.process_with_node(ReduceNode( process_id=process_id, @@ -665,9 
+662,8 @@ def apply_neighborhood(self, size:List[Dict],overlap:List[Dict]=[],process:PGNod arguments=args )) if isinstance(process, typing.Callable): - builder = ProcessBuilder() - callback_graph = process(builder) - result_cube.processgraph_node.arguments['process'] = {'process_graph': callback_graph.pgnode} + process_builder = process(ProcessBuilder.from_parameter("data")) + result_cube.processgraph_node.arguments['process'] = {'process_graph': process_builder.pgnode} return result_cube @@ -694,9 +690,8 @@ def apply(self, process: Union[str, PGNode]=None, data_argument='x') -> 'DataCub } )) if isinstance(process, typing.Callable): - builder = ProcessBuilder(parent_data_parameter="x") - callback_graph = process(builder) - result_cube.processgraph_node.arguments['process'] = {'process_graph': callback_graph.pgnode} + process_builder = process(ProcessBuilder.from_parameter("x")) + result_cube.processgraph_node.arguments['process'] = {'process_graph': process_builder.pgnode} return result_cube @@ -923,10 +918,11 @@ def merge(self, other: 'DataCube', overlap_resolver: Union[str, typing.Callable] arguments={"data": [{"from_parameter": "x"}, {"from_parameter": "y"}]} ) elif isinstance(overlap_resolver,typing.Callable): - builder = ProcessBuilder() - callback_graph = overlap_resolver(builder) - overlap_resolver_node = callback_graph.pgnode - elif isinstance(overlap_resolver,PGNode): + process_builder = overlap_resolver( + ProcessBuilder.from_parameter("x"), ProcessBuilder.from_parameter("y") + ) + overlap_resolver_node = process_builder.pgnode + elif isinstance(overlap_resolver, PGNode): overlap_resolver_node = overlap_resolver else: raise ValueError("Unsupported overlap_resolver: %s" % str(overlap_resolver)) diff --git a/openeo/rest/processbuilder.py b/openeo/rest/processbuilder.py deleted file mode 100644 index b8de6140e..000000000 --- a/openeo/rest/processbuilder.py +++ /dev/null @@ -1,95 +0,0 @@ - -from openeo import ImageCollection -from 
openeo.internal.graph_building import PGNode - -def max(data:'ProcessBuilder',ignore_nodata=True) -> 'ProcessBuilder': - """ - Computes the largest value of an array of numbers, which is is equal to the first element of a sorted (i.e., ordered) version the array. - - An array without non-null elements resolves always with null. - - @param ignore_nodata: - @return: - """ - return data.max(ignore_nodata) - -def array_element(data,index=None,label=None,return_nodata=None): - args = {'data': data._ancestor()} - if index is not None: - args['index']=index - elif label is not None: - args['label'] = label - else: - raise ValueError("Either the index or label argument should be specified.") - - if return_nodata is not None: - args['return_nodata'] = return_nodata - return data.process('array_element', args) - - -def add(x:'ProcessBuilder',y:'ProcessBuilder') -> 'ProcessBuilder': - - args = { - 'x': x._ancestor(), - 'y': y._ancestor() - } - return x.process('add', args) - - -class ProcessBuilder(ImageCollection): - """ - An object to construct process graphs for callbacks. - """ - - PARENT='PARENT' - - def __init__(self, pgnode=None, parent_data_parameter='data'): - """ - - @param pgnode: - @param parent_data_parameter: - """ - self.pgnode = pgnode - self.parent_data_parameter = parent_data_parameter - - def _ancestor(self): - if self.pgnode is not None: - return self.pgnode - else: - return {'from_parameter': self.parent_data_parameter} - - def run_udf(self,code=str,runtime:str=""): - return self.process('run_udf',{'data':self._ancestor(),'udf':code,'runtime':runtime}) - - def absolute(self): - return self.process('absolute', {'x': self._ancestor()}) - - def __add__(self, other) -> 'ProcessBuilder': - return add(self,other) - - def max(self,ignore_nodata=True): - """ - Computes the largest value of an array of numbers, which is is equal to the first element of a sorted (i.e., ordered) version the array. - - An array without non-null elements resolves always with null. 
- - @param ignore_nodata: - @return: - """ - return self.process('max', {'data': self._ancestor(), 'ignore_nodata':ignore_nodata}) - - def process(self, process_id: str, arguments: dict = None, **kwargs) -> 'ProcessBuilder': - """ - Generic helper to create a new DataCube by applying a process. - - :param process_id: process id of the process. - :param arguments: argument dictionary for the process. - :return: new DataCube instance - """ - arguments = {**(arguments or {}), **kwargs} - return ProcessBuilder(PGNode(process_id=process_id, arguments=arguments)) - - - -def absolute(data: ProcessBuilder) -> ProcessBuilder: - return data.absolute() diff --git a/tests/processes/__init__.py b/tests/processes/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/processes/test_generator.py b/tests/processes/test_generator.py new file mode 100644 index 000000000..92e905e81 --- /dev/null +++ b/tests/processes/test_generator.py @@ -0,0 +1,123 @@ +from textwrap import dedent + +from openeo.processes.generator import PythonRenderer +from openeo.processes.parse import Process + + +def test_render_basic(): + process = Process.from_dict({ + "id": "incr", + "description": "Increment a value", + "summary": "Increment a value", + "parameters": [{"name": "x", "description": "value", "schema": {"type": "integer"}}], + "returns": {"description": "incremented value", "schema": {"type": "integer"}} + }) + + renderer = PythonRenderer() + src = renderer.render_process(process) + assert src == dedent('''\ + def incr(x): + """ + Increment a value + + :param x: value + + :return: incremented value + """ + return process('incr', x=x)''') + + +def test_render_no_params(): + process = Process.from_dict({ + "id": "pi", + "description": "Pi", + "summary": "Pi", + "parameters": [], + "returns": {"description": "value of pi", "schema": {"type": "number"}} + }) + + renderer = PythonRenderer() + src = renderer.render_process(process) + assert src == dedent('''\ + def pi(): + """ 
+ Pi + + :return: value of pi + """ + return process('pi', )''') + + +def test_render_with_default(): + process = Process.from_dict({ + "id": "incr", + "description": "Increment a value", + "summary": "Increment a value", + "parameters": [ + {"name": "x", "description": "value", "schema": {"type": "integer"}}, + {"name": "i", "description": "increment", "schema": {"type": "integer"}, "default": 1}, + ], + "returns": {"description": "incremented value", "schema": {"type": "integer"}} + }) + + renderer = PythonRenderer() + src = renderer.render_process(process) + assert src == dedent('''\ + def incr(x, i=1): + """ + Increment a value + + :param x: value + :param i: increment + + :return: incremented value + """ + return process('incr', x=x, i=i)''') + + +def test_render_with_optional(): + process = Process.from_dict({ + "id": "foo", + "description": "Foo", + "summary": "Foo", + "parameters": [ + {"name": "x", "description": "value", "schema": {"type": "integer"}}, + {"name": "y", "description": "something", "schema": {"type": "integer"}, "optional": True, "default": 1}, + ], + "returns": {"description": "new value", "schema": {"type": "integer"}} + }) + + renderer = PythonRenderer(optional_default="UNSET") + src = renderer.render_process(process) + assert src == dedent('''\ + def foo(x, y=UNSET): + """ + Foo + + :param x: value + :param y: something + + :return: new value + """ + return process('foo', x=x, y=y)''') + + +def test_render_oo_no_params(): + process = Process.from_dict({ + "id": "pi", + "description": "Pi", + "summary": "Pi", + "parameters": [], + "returns": {"description": "value of pi", "schema": {"type": "number"}} + }) + + renderer = PythonRenderer(oo_mode=True) + src = renderer.render_process(process) + assert src == dedent('''\ + def pi(self): + """ + Pi + + :return: value of pi + """ + return process('pi', )''') diff --git a/tests/processes/test_parse.py b/tests/processes/test_parse.py new file mode 100644 index 000000000..3753b5d8a --- /dev/null 
+++ b/tests/processes/test_parse.py @@ -0,0 +1,100 @@ +from openeo.processes.parse import Parameter, Schema, Returns, Process + + +def test_schema(): + s = Schema.from_dict({"type": "number"}) + assert s.schema == {"type": "number"} + + +def test_parameter(): + p = Parameter.from_dict({ + "name": "foo", + "description": "Foo amount", + "schema": {"type": "number"}, + }) + assert p.name == "foo" + assert p.description == "Foo amount" + assert p.schema.schema == {"type": "number"} + assert p.default is Parameter.NO_DEFAULT + assert p.optional is False + + +def test_parameter_default(): + p = Parameter.from_dict({ + "name": "foo", + "description": "Foo amount", + "schema": {"type": "number"}, + "default": 5 + }) + assert p.default == 5 + + +def test_parameter_default_none(): + p = Parameter.from_dict({ + "name": "foo", + "description": "Foo amount", + "schema": {"type": "number"}, + "default": None + }) + assert p.default is None + + +def test_returns(): + r = Returns.from_dict({ + "description": "Roo", + "schema": {"type": "number"} + }) + assert r.schema.schema == {"type": "number"} + assert r.description == "Roo" + + +def test_process(): + p = Process.from_dict({ + "id": "absolute", + "summary": "Absolute value", + "description": "Computes the absolute value of a real number.", + "categories": ["math"], + "parameters": [ + {"name": "x", "description": "A number.", "schema": {"type": ["number", "null"]}}, + ], + "returns": { + "description": "The computed absolute value.", + "schema": {"type": ["number", "null"], "minimum": 0} + }, + "links": [{"rel": "about", "href": "http://example.com/abs.html"}], + }) + + assert p.id == "absolute" + assert p.description == "Computes the absolute value of a real number." + assert p.summary == "Absolute value" + assert len(p.parameters) == 1 + assert p.parameters[0].name == "x" + assert p.parameters[0].description == "A number." 
+ assert p.parameters[0].schema.schema == {"type": ["number", "null"]} + assert p.returns.description == "The computed absolute value." + assert p.returns.schema.schema == {"type": ["number", "null"], "minimum": 0} + + +def test_process_from_json(): + p = Process.from_json('''{ + "id": "absolute", + "summary": "Absolute value", + "description": "Computes the absolute value of a real number.", + "categories": ["math"], + "parameters": [ + {"name": "x", "description": "A number.", "schema": {"type": ["number", "null"]}} + ], + "returns": { + "description": "The computed absolute value.", + "schema": {"type": ["number", "null"], "minimum": 0} + } + }''') + assert p.id == "absolute" + assert p.description == "Computes the absolute value of a real number." + assert p.summary == "Absolute value" + assert len(p.parameters) == 1 + assert p.parameters[0].name == "x" + assert p.parameters[0].description == "A number." + assert p.parameters[0].schema.schema == {"type": ["number", "null"]} + assert p.returns.description == "The computed absolute value." 
+ assert p.returns.schema.schema == {"type": ["number", "null"], "minimum": 0} diff --git a/tests/rest/datacube/test_datacube100.py b/tests/rest/datacube/test_datacube100.py index cb70fdcd6..fa2826a8c 100644 --- a/tests/rest/datacube/test_datacube100.py +++ b/tests/rest/datacube/test_datacube100.py @@ -7,8 +7,8 @@ import shapely.geometry import openeo.metadata -from openeo.internal.graph_building import PGNode from openeo import UDF +from openeo.internal.graph_building import PGNode from openeo.rest.connection import Connection from openeo.rest.datacube import THIS from .conftest import API_URL @@ -254,21 +254,6 @@ def test_apply_absolute_pgnode(con100): assert result.graph == expected_graph -def test_apply_absolute_callback_lambda_method(con100): - im = con100.load_collection("S2") - result = im.apply(lambda data: data.absolute()) - expected_graph = load_json_resource('data/1.0.0/apply_absolute.json') - assert result.graph == expected_graph - - -def test_apply_absolute_callback_function(con100): - im = con100.load_collection("S2") - from openeo.rest.processbuilder import absolute - result = im.apply(absolute) - expected_graph = load_json_resource('data/1.0.0/apply_absolute.json') - assert result.graph == expected_graph - - def test_load_collection_properties(con100): # TODO: put this somewhere and expose it to the user? 
def eq(value, case_sensitive=True) -> PGNode: @@ -311,7 +296,7 @@ def test_apply_neighborhood_udf(con100): {'dimension': 'y', 'value': 128, 'unit': 'px'} ], overlap=[ {'dimension': 't', 'value': 'P10d'}, - ],process= lambda data:data.run_udf(code="myfancycode", runtime="Python")) + ],process= lambda data:data.run_udf(udf="myfancycode", runtime="Python")) actual_graph = neighbors.graph['applyneighborhood1'] assert actual_graph == {'arguments': {'data': {'from_node': 'loadcollection1'}, 'overlap': [{'dimension': 't', 'value': 'P10d'}], diff --git a/tests/rest/datacube/test_processbuilder.py b/tests/rest/datacube/test_processbuilder.py index 1643c5532..b71a81df3 100644 --- a/tests/rest/datacube/test_processbuilder.py +++ b/tests/rest/datacube/test_processbuilder.py @@ -1,27 +1,61 @@ -from openeo.rest.processbuilder import ProcessBuilder +from openeo.processes.processes import ProcessBuilder + +from ... import load_json_resource + + +def test_apply_absolute_callback_lambda_method(con100): + im = con100.load_collection("S2") + result = im.apply(lambda data: data.absolute()) + expected_graph = load_json_resource('data/1.0.0/apply_absolute.json') + assert result.graph == expected_graph + + +def test_apply_absolute_callback_function(con100): + im = con100.load_collection("S2") + from openeo.processes.processes import absolute + result = im.apply(absolute) + expected_graph = load_json_resource('data/1.0.0/apply_absolute.json') + assert result.graph == expected_graph + + +def test_apply_absolute_callback_function_custom(con100): + def abs(x: ProcessBuilder) -> ProcessBuilder: + return x.absolute() + + im = con100.load_collection("S2") + result = im.apply(abs) + expected_graph = load_json_resource('data/1.0.0/apply_absolute.json') + assert result.graph == expected_graph def check_apply_neighbors(neighbors): actual_graph = neighbors.graph['applyneighborhood1'] - assert actual_graph == {'arguments': {'data': {'from_node': 'loadcollection1'}, - 'overlap': [{'dimension': 't', 
'value': 'P10d'}], - 'process': {'process_graph': {'runudf1': {'arguments': {'udf': 'myfancycode', - 'data': { - 'from_parameter': 'data'}, - 'runtime': 'Python'}, - 'process_id': 'run_udf', - 'result': True}}}, - 'size': [{'dimension': 'x', 'unit': 'px', 'value': 128}, - {'dimension': 'y', 'unit': 'px', 'value': 128}]}, - 'process_id': 'apply_neighborhood', - 'result': True} + assert actual_graph == { + 'process_id': 'apply_neighborhood', + 'arguments': { + 'data': {'from_node': 'loadcollection1'}, + 'overlap': [{'dimension': 't', 'value': 'P10d'}], + 'process': {'process_graph': { + 'runudf1': { + 'process_id': 'run_udf', + 'arguments': { + 'udf': 'myfancycode', + 'data': {'from_parameter': 'data'}, + 'runtime': 'Python', + }, + 'result': True + } + }}, + 'size': [{'dimension': 'x', 'unit': 'px', 'value': 128}, {'dimension': 'y', 'unit': 'px', 'value': 128}]}, + 'result': True + } def test_apply_neighborhood_udf_callback(con100): collection = con100.load_collection("S2") - def callback(data:ProcessBuilder): - return data.run_udf(code='myfancycode', runtime='Python') + def callback(data: ProcessBuilder): + return data.run_udf(udf='myfancycode', runtime='Python') neighbors = collection.apply_neighborhood(process=callback, size=[ {'dimension': 'x', 'value': 128, 'unit': 'px'}, @@ -35,32 +69,37 @@ def callback(data:ProcessBuilder): def test_apply_neighborhood_complex_callback(con100): collection = con100.load_collection("S2") - from openeo.rest.processbuilder import max - neighbors = collection.apply_neighborhood(process=lambda data:max(data).absolute(), size=[ + from openeo.processes.processes import max + neighbors = collection.apply_neighborhood(process=lambda data: max(data).absolute(), size=[ {'dimension': 'x', 'value': 128, 'unit': 'px'}, {'dimension': 'y', 'value': 128, 'unit': 'px'} ], overlap=[ {'dimension': 't', 'value': 'P10d'}, ]) actual_graph = neighbors.graph['applyneighborhood1'] - assert actual_graph == {'arguments': {'data': {'from_node': 
'loadcollection1'}, - 'overlap': [{'dimension': 't', 'value': 'P10d'}], - 'process': {'process_graph': { - 'absolute1': {'arguments': {'x': {'from_node': 'max1'}}, - 'process_id': 'absolute', - 'result': True}, - 'max1': {'arguments': {'data': {'from_parameter': 'data'}, - 'ignore_nodata': True}, - 'process_id': 'max'}} - }, - 'size': [{'dimension': 'x', 'unit': 'px', 'value': 128}, - {'dimension': 'y', 'unit': 'px', 'value': 128}]}, - 'process_id': 'apply_neighborhood', - 'result': True} + assert actual_graph == { + 'process_id': 'apply_neighborhood', + 'arguments': { + 'data': {'from_node': 'loadcollection1'}, + 'overlap': [{'dimension': 't', 'value': 'P10d'}], + 'process': {'process_graph': { + 'max1': { + 'process_id': 'max', + 'arguments': {'data': {'from_parameter': 'data'}}, + }, + 'absolute1': { + 'process_id': 'absolute', + 'arguments': {'x': {'from_node': 'max1'}}, + 'result': True + }, + }}, + 'size': [{'dimension': 'x', 'unit': 'px', 'value': 128}, {'dimension': 'y', 'unit': 'px', 'value': 128}]}, + 'result': True + } def test_apply_dimension_bandmath(con100): - from openeo.rest.processbuilder import array_element + from openeo.processes.processes import array_element collection = con100.load_collection("S2") bandsum = collection.apply_dimension( @@ -97,48 +136,68 @@ def test_apply_dimension_bandmath(con100): def test_reduce_dimension(con100): collection = con100.load_collection("S2") - from openeo.rest.processbuilder import array_element + from openeo.processes.processes import array_element - bandsum = collection.reduce_dimension(dimension='bands',reducer=lambda data:array_element(data,index=1) + array_element(data,index=2)) + bandsum = collection.reduce_dimension( + dimension='bands', + reducer=lambda data: array_element(data, index=1) + array_element(data, index=2) + ) actual_graph = bandsum.graph['reducedimension1'] - assert actual_graph == {'arguments': {'data': {'from_node': 'loadcollection1'}, - 'dimension': 'bands', - 'reducer': 
{'process_graph': {'add1': {'arguments': {'x': {'from_node': 'arrayelement1'}, - 'y': {'from_node': 'arrayelement2'}}, - 'process_id': 'add', - 'result': True}, - 'arrayelement1': {'arguments': {'data': {'from_parameter': 'data'}, - 'index': 1}, - 'process_id': 'array_element'}, - 'arrayelement2': {'arguments': {'data': {'from_parameter': 'data'}, - 'index': 2}, - 'process_id': 'array_element'}}}}, - 'process_id': 'reduce_dimension', - 'result': True} - + assert actual_graph == { + 'arguments': { + 'data': {'from_node': 'loadcollection1'}, + 'dimension': 'bands', + 'reducer': {'process_graph': { + 'arrayelement1': { + 'process_id': 'array_element', + 'arguments': {'data': {'from_parameter': 'data'}, 'index': 1}, + }, + 'arrayelement2': { + 'process_id': 'array_element', + 'arguments': {'data': {'from_parameter': 'data'}, 'index': 2}, + }, + 'add1': { + 'arguments': {'x': {'from_node': 'arrayelement1'}, 'y': {'from_node': 'arrayelement2'}}, + 'process_id': 'add', + 'result': True + }, + }}, + }, + 'process_id': 'reduce_dimension', + 'result': True} def test_apply_dimension(con100): collection = con100.load_collection("S2") - from openeo.rest.processbuilder import array_element + from openeo.processes.processes import array_element - bandsum = collection.apply_dimension(dimension='bands',process=lambda data:array_element(data,index=1) + array_element(data,index=2)) + bandsum = collection.apply_dimension( + dimension='bands', + process=lambda data: array_element(data, index=1) + array_element(data, index=2) + ) actual_graph = bandsum.graph['applydimension1'] - assert actual_graph == {'arguments': {'data': {'from_node': 'loadcollection1'}, - 'dimension': 'bands', - 'process': {'process_graph': {'add1': {'arguments': {'x': {'from_node': 'arrayelement1'}, - 'y': {'from_node': 'arrayelement2'}}, - 'process_id': 'add', - 'result': True}, - 'arrayelement1': {'arguments': {'data': {'from_parameter': 'data'}, - 'index': 1}, - 'process_id': 'array_element'}, - 'arrayelement2': 
{'arguments': {'data': {'from_parameter': 'data'}, - 'index': 2}, - 'process_id': 'array_element'}}}}, - 'process_id': 'apply_dimension', - 'result': True} - + assert actual_graph == { + 'process_id': 'apply_dimension', + 'arguments': { + 'data': {'from_node': 'loadcollection1'}, + 'dimension': 'bands', + 'process': {'process_graph': { + 'arrayelement1': { + 'process_id': 'array_element', + 'arguments': {'data': {'from_parameter': 'data'}, 'index': 1}, + }, + 'arrayelement2': { + 'process_id': 'array_element', + 'arguments': {'data': {'from_parameter': 'data'}, 'index': 2}, + }, + 'add1': { + 'process_id': 'add', + 'arguments': {'x': {'from_node': 'arrayelement1'}, 'y': {'from_node': 'arrayelement2'}}, + 'result': True + }, + }} + }, + 'result': True} From fe5514331baab3b32f1e7a6f7de331f5e590512a Mon Sep 17 00:00:00 2001 From: Stefaan Lippens Date: Fri, 11 Sep 2020 11:08:46 +0200 Subject: [PATCH 3/9] EP-3555 re-introduce "ProcessBuilder" return type-hint --- openeo/processes/builder.py | 3 +- openeo/processes/generator.py | 28 +- openeo/processes/processes.py | 480 +++++++++++++++--------------- tests/processes/test_generator.py | 40 ++- 4 files changed, 294 insertions(+), 257 deletions(-) diff --git a/openeo/processes/builder.py b/openeo/processes/builder.py index 5f2b7f4b6..ec83b6ca6 100644 --- a/openeo/processes/builder.py +++ b/openeo/processes/builder.py @@ -2,7 +2,6 @@ from openeo.internal.graph_building import PGNode - UNSET = object() @@ -20,7 +19,7 @@ def from_parameter(cls, parameter: str): return cls({"from_parameter": parameter}) @classmethod - def process(cls, process_id: str, arguments: dict = None, **kwargs) -> 'ProcessBuilderBase': + def process(cls, process_id: str, arguments: dict = None, **kwargs): """ Apply process, using given arguments diff --git a/openeo/processes/generator.py b/openeo/processes/generator.py index 6b21fb939..2d9cc7fae 100644 --- a/openeo/processes/generator.py +++ b/openeo/processes/generator.py @@ -12,20 +12,25 @@ class 
PythonRenderer: DEFAULT_WIDTH = 115 def __init__( - self, oo_mode=False, indent=" ", body_template="return process({id!r}, {args})", optional_default="None" + self, oo_mode=False, indent=" ", body_template="return process({id!r}, {args})", optional_default="None", + return_type_hint: str = None ): self.oo_mode = oo_mode self.indent = indent self.body_template = body_template self.optional_default = optional_default + self.return_type_hint = return_type_hint - def render_process(self, process: Process, prefix: str = "", width: int = DEFAULT_WIDTH) -> str: + def render_process(self, process: Process, prefix: str = None, width: int = DEFAULT_WIDTH) -> str: + if prefix is None: + prefix = " " if self.oo_mode else "" # TODO: add type hints # TODO: width limit? - def_line = "def {id}({args}):".format( + def_line = "def {id}({args}){th}:".format( id=self._safe_name(process.id), - args=", ".join(self._def_arguments(process)) + args=", ".join(self._def_arguments(process)), + th=" -> {t}".format(t=self.return_type_hint) if self.return_type_hint else "" ) call_args = ", ".join( @@ -102,11 +107,20 @@ def __add__(self, other): """) - fun_renderer = PythonRenderer(body_template="return process({id!r}, {args})", optional_default="UNSET") - oo_renderer = PythonRenderer(oo_mode=True, body_template="return {id}({args})", optional_default="UNSET") + fun_renderer = PythonRenderer( + body_template="return process({id!r}, {args})", + optional_default="UNSET", + return_type_hint="ProcessBuilder" + ) + oo_renderer = PythonRenderer( + oo_mode=True, + body_template="return {id}({args})", + optional_default="UNSET", + return_type_hint="'ProcessBuilder'" + ) for p in processes: fun_src += fun_renderer.render_process(p) + "\n\n\n" - oo_src += oo_renderer.render_process(p, prefix=" ") + "\n\n" + oo_src += oo_renderer.render_process(p) + "\n\n" output.write(textwrap.dedent(""" # This file is automatically generated. # Do not edit directly. 
diff --git a/openeo/processes/processes.py b/openeo/processes/processes.py index 8eb192d01..f96c7e67f 100644 --- a/openeo/processes/processes.py +++ b/openeo/processes/processes.py @@ -11,7 +11,7 @@ def __add__(self, other): return self.add(other) - def absolute(self): + def absolute(self) -> 'ProcessBuilder': """ Absolute value @@ -21,7 +21,7 @@ def absolute(self): """ return absolute(x=self) - def add(self, y): + def add(self, y) -> 'ProcessBuilder': """ Addition of two numbers @@ -32,7 +32,7 @@ def add(self, y): """ return add(x=self, y=y) - def add_dimension(self, name, label, type=UNSET): + def add_dimension(self, name, label, type=UNSET) -> 'ProcessBuilder': """ Add a new dimension @@ -46,7 +46,7 @@ def add_dimension(self, name, label, type=UNSET): """ return add_dimension(data=self, name=name, label=label, type=type) - def aggregate_spatial(self, geometries, reducer, target_dimension=UNSET, context=UNSET): + def aggregate_spatial(self, geometries, reducer, target_dimension=UNSET, context=UNSET) -> 'ProcessBuilder': """ Zonal statistics for geometries @@ -70,7 +70,7 @@ def aggregate_spatial(self, geometries, reducer, target_dimension=UNSET, context """ return aggregate_spatial(data=self, geometries=geometries, reducer=reducer, target_dimension=target_dimension, context=context) - def aggregate_spatial_binary(self, geometries, reducer, target_dimension=UNSET, context=UNSET): + def aggregate_spatial_binary(self, geometries, reducer, target_dimension=UNSET, context=UNSET) -> 'ProcessBuilder': """ Zonal statistics for geometries by binary aggregation @@ -95,7 +95,7 @@ def aggregate_spatial_binary(self, geometries, reducer, target_dimension=UNSET, """ return aggregate_spatial_binary(data=self, geometries=geometries, reducer=reducer, target_dimension=target_dimension, context=context) - def aggregate_temporal(self, intervals, reducer, labels=UNSET, dimension=UNSET, context=UNSET): + def aggregate_temporal(self, intervals, reducer, labels=UNSET, dimension=UNSET, 
context=UNSET) -> 'ProcessBuilder': """ Temporal aggregations @@ -127,7 +127,7 @@ def aggregate_temporal(self, intervals, reducer, labels=UNSET, dimension=UNSET, """ return aggregate_temporal(data=self, intervals=intervals, reducer=reducer, labels=labels, dimension=dimension, context=context) - def aggregate_temporal_period(self, period, reducer, dimension=UNSET, context=UNSET): + def aggregate_temporal_period(self, period, reducer, dimension=UNSET, context=UNSET) -> 'ProcessBuilder': """ Temporal aggregations based on calendar hierarchies @@ -164,7 +164,7 @@ def aggregate_temporal_period(self, period, reducer, dimension=UNSET, context=UN """ return aggregate_temporal_period(data=self, period=period, reducer=reducer, dimension=dimension, context=context) - def all(self, ignore_nodata=UNSET): + def all(self, ignore_nodata=UNSET) -> 'ProcessBuilder': """ Are all of the values true? @@ -175,7 +175,7 @@ def all(self, ignore_nodata=UNSET): """ return all(data=self, ignore_nodata=ignore_nodata) - def and_(self, y): + def and_(self, y) -> 'ProcessBuilder': """ Logical AND @@ -186,7 +186,7 @@ def and_(self, y): """ return and_(x=self, y=y) - def anomaly(self, normals, period): + def anomaly(self, normals, period) -> 'ProcessBuilder': """ Computes anomalies @@ -225,7 +225,7 @@ def anomaly(self, normals, period): """ return anomaly(data=self, normals=normals, period=period) - def any(self, ignore_nodata=UNSET): + def any(self, ignore_nodata=UNSET) -> 'ProcessBuilder': """ Is at least one value true? 
@@ -236,7 +236,7 @@ def any(self, ignore_nodata=UNSET): """ return any(data=self, ignore_nodata=ignore_nodata) - def apply(self, process, context=UNSET): + def apply(self, process, context=UNSET) -> 'ProcessBuilder': """ Apply a process to each pixel @@ -249,7 +249,7 @@ def apply(self, process, context=UNSET): """ return apply(data=self, process=process, context=context) - def apply_dimension(self, process, dimension, target_dimension=UNSET, context=UNSET): + def apply_dimension(self, process, dimension, target_dimension=UNSET, context=UNSET) -> 'ProcessBuilder': """ Apply a process to pixels along a dimension @@ -286,7 +286,7 @@ def apply_dimension(self, process, dimension, target_dimension=UNSET, context=UN """ return apply_dimension(data=self, process=process, dimension=dimension, target_dimension=target_dimension, context=context) - def apply_kernel(self, kernel, factor=UNSET, border=UNSET, replace_invalid=UNSET): + def apply_kernel(self, kernel, factor=UNSET, border=UNSET, replace_invalid=UNSET) -> 'ProcessBuilder': """ Apply a spatial convolution with a kernel @@ -312,7 +312,7 @@ def apply_kernel(self, kernel, factor=UNSET, border=UNSET, replace_invalid=UNSET """ return apply_kernel(data=self, kernel=kernel, factor=factor, border=border, replace_invalid=replace_invalid) - def apply_neighborhood(self, process, size, overlap=UNSET, context=UNSET): + def apply_neighborhood(self, process, size, overlap=UNSET, context=UNSET) -> 'ProcessBuilder': """ Apply a process to pixels in a n-dimensional neighbourhood @@ -335,7 +335,7 @@ def apply_neighborhood(self, process, size, overlap=UNSET, context=UNSET): """ return apply_neighborhood(data=self, process=process, size=size, overlap=overlap, context=context) - def arccos(self): + def arccos(self) -> 'ProcessBuilder': """ Inverse cosine @@ -345,7 +345,7 @@ def arccos(self): """ return arccos(x=self) - def arcosh(self): + def arcosh(self) -> 'ProcessBuilder': """ Inverse hyperbolic cosine @@ -355,7 +355,7 @@ def 
arcosh(self): """ return arcosh(x=self) - def arcsin(self): + def arcsin(self) -> 'ProcessBuilder': """ Inverse sine @@ -365,7 +365,7 @@ def arcsin(self): """ return arcsin(x=self) - def arctan(self): + def arctan(self) -> 'ProcessBuilder': """ Inverse tangent @@ -375,7 +375,7 @@ def arctan(self): """ return arctan(x=self) - def arctan2(self, x): + def arctan2(self, x) -> 'ProcessBuilder': """ Inverse tangent of two numbers @@ -386,7 +386,7 @@ def arctan2(self, x): """ return arctan2(y=self, x=x) - def array_apply(self, process, context=UNSET): + def array_apply(self, process, context=UNSET) -> 'ProcessBuilder': """ Applies a unary process to each array element @@ -400,7 +400,7 @@ def array_apply(self, process, context=UNSET): """ return array_apply(data=self, process=process, context=context) - def array_contains(self, value): + def array_contains(self, value) -> 'ProcessBuilder': """ Check whether the array contains a given value @@ -411,7 +411,7 @@ def array_contains(self, value): """ return array_contains(data=self, value=value) - def array_element(self, index=UNSET, label=UNSET, return_nodata=UNSET): + def array_element(self, index=UNSET, label=UNSET, return_nodata=UNSET) -> 'ProcessBuilder': """ Get an element from an array @@ -425,7 +425,7 @@ def array_element(self, index=UNSET, label=UNSET, return_nodata=UNSET): """ return array_element(data=self, index=index, label=label, return_nodata=return_nodata) - def array_filter(self, condition, context=UNSET): + def array_filter(self, condition, context=UNSET) -> 'ProcessBuilder': """ Filter an array based on a condition @@ -439,7 +439,7 @@ def array_filter(self, condition, context=UNSET): """ return array_filter(data=self, condition=condition, context=context) - def array_find(self, value): + def array_find(self, value) -> 'ProcessBuilder': """ Get the index for a value in an array @@ -451,7 +451,7 @@ def array_find(self, value): """ return array_find(data=self, value=value) - def array_labels(self): + def 
array_labels(self) -> 'ProcessBuilder': """ Get the labels for an array @@ -461,7 +461,7 @@ def array_labels(self): """ return array_labels(data=self) - def arsinh(self): + def arsinh(self) -> 'ProcessBuilder': """ Inverse hyperbolic sine @@ -471,7 +471,7 @@ def arsinh(self): """ return arsinh(x=self) - def artanh(self): + def artanh(self) -> 'ProcessBuilder': """ Inverse hyperbolic tangent @@ -481,7 +481,7 @@ def artanh(self): """ return artanh(x=self) - def between(self, min, max, exclude_max=UNSET): + def between(self, min, max, exclude_max=UNSET) -> 'ProcessBuilder': """ Between comparison @@ -494,7 +494,7 @@ def between(self, min, max, exclude_max=UNSET): """ return between(x=self, min=min, max=max, exclude_max=exclude_max) - def ceil(self): + def ceil(self) -> 'ProcessBuilder': """ Round fractions up @@ -504,7 +504,7 @@ def ceil(self): """ return ceil(x=self) - def climatological_normal(self, period, climatology_period=UNSET): + def climatological_normal(self, period, climatology_period=UNSET) -> 'ProcessBuilder': """ Computes climatology normals @@ -534,7 +534,7 @@ def climatological_normal(self, period, climatology_period=UNSET): """ return climatological_normal(data=self, period=period, climatology_period=climatology_period) - def clip(self, min, max): + def clip(self, min, max) -> 'ProcessBuilder': """ Clip a value between a minimum and a maximum @@ -548,7 +548,7 @@ def clip(self, min, max): """ return clip(x=self, min=min, max=max) - def constant(self): + def constant(self) -> 'ProcessBuilder': """ Define a constant value @@ -558,7 +558,7 @@ def constant(self): """ return constant(x=self) - def cos(self): + def cos(self) -> 'ProcessBuilder': """ Cosine @@ -568,7 +568,7 @@ def cos(self): """ return cos(x=self) - def cosh(self): + def cosh(self) -> 'ProcessBuilder': """ Hyperbolic cosine @@ -578,7 +578,7 @@ def cosh(self): """ return cosh(x=self) - def count(self, condition=UNSET, context=UNSET): + def count(self, condition=UNSET, context=UNSET) -> 
'ProcessBuilder': """ Count the number of elements @@ -593,7 +593,7 @@ def count(self, condition=UNSET, context=UNSET): """ return count(data=self, condition=condition, context=context) - def create_raster_cube(self): + def create_raster_cube(self) -> 'ProcessBuilder': """ Create an empty raster data cube @@ -601,7 +601,7 @@ def create_raster_cube(self): """ return create_raster_cube() - def cummax(self, ignore_nodata=UNSET): + def cummax(self, ignore_nodata=UNSET) -> 'ProcessBuilder': """ Cumulative maxima @@ -614,7 +614,7 @@ def cummax(self, ignore_nodata=UNSET): """ return cummax(data=self, ignore_nodata=ignore_nodata) - def cummin(self, ignore_nodata=UNSET): + def cummin(self, ignore_nodata=UNSET) -> 'ProcessBuilder': """ Cumulative minima @@ -627,7 +627,7 @@ def cummin(self, ignore_nodata=UNSET): """ return cummin(data=self, ignore_nodata=ignore_nodata) - def cumproduct(self, ignore_nodata=UNSET): + def cumproduct(self, ignore_nodata=UNSET) -> 'ProcessBuilder': """ Cumulative products @@ -640,7 +640,7 @@ def cumproduct(self, ignore_nodata=UNSET): """ return cumproduct(data=self, ignore_nodata=ignore_nodata) - def cumsum(self, ignore_nodata=UNSET): + def cumsum(self, ignore_nodata=UNSET) -> 'ProcessBuilder': """ Cumulative sums @@ -653,7 +653,7 @@ def cumsum(self, ignore_nodata=UNSET): """ return cumsum(data=self, ignore_nodata=ignore_nodata) - def debug(self, code=UNSET, level=UNSET, message=UNSET): + def debug(self, code=UNSET, level=UNSET, message=UNSET) -> 'ProcessBuilder': """ Publish debugging information @@ -667,7 +667,7 @@ def debug(self, code=UNSET, level=UNSET, message=UNSET): """ return debug(data=self, code=code, level=level, message=message) - def dimension_labels(self, dimension): + def dimension_labels(self, dimension) -> 'ProcessBuilder': """ Get the dimension labels @@ -678,7 +678,7 @@ def dimension_labels(self, dimension): """ return dimension_labels(data=self, dimension=dimension) - def divide(self, y): + def divide(self, y) -> 
'ProcessBuilder': """ Division of two numbers @@ -689,7 +689,7 @@ def divide(self, y): """ return divide(x=self, y=y) - def drop_dimension(self, name): + def drop_dimension(self, name) -> 'ProcessBuilder': """ Remove a dimension @@ -702,7 +702,7 @@ def drop_dimension(self, name): """ return drop_dimension(data=self, name=name) - def e(self): + def e(self) -> 'ProcessBuilder': """ Euler's number (e) @@ -710,7 +710,7 @@ def e(self): """ return e() - def eq(self, y, delta=UNSET, case_sensitive=UNSET): + def eq(self, y, delta=UNSET, case_sensitive=UNSET) -> 'ProcessBuilder': """ Equal to comparison @@ -727,7 +727,7 @@ def eq(self, y, delta=UNSET, case_sensitive=UNSET): """ return eq(x=self, y=y, delta=delta, case_sensitive=case_sensitive) - def exp(self): + def exp(self) -> 'ProcessBuilder': """ Exponentiation to the base e @@ -737,7 +737,7 @@ def exp(self): """ return exp(p=self) - def extrema(self, ignore_nodata=UNSET): + def extrema(self, ignore_nodata=UNSET) -> 'ProcessBuilder': """ Minimum and maximum values @@ -752,7 +752,7 @@ def extrema(self, ignore_nodata=UNSET): """ return extrema(data=self, ignore_nodata=ignore_nodata) - def filter_bands(self, bands=UNSET, wavelengths=UNSET): + def filter_bands(self, bands=UNSET, wavelengths=UNSET) -> 'ProcessBuilder': """ Filter the bands by name @@ -774,7 +774,7 @@ def filter_bands(self, bands=UNSET, wavelengths=UNSET): """ return filter_bands(data=self, bands=bands, wavelengths=wavelengths) - def filter_bbox(self, extent): + def filter_bbox(self, extent) -> 'ProcessBuilder': """ Spatial filter using a bounding box @@ -787,7 +787,7 @@ def filter_bbox(self, extent): """ return filter_bbox(data=self, extent=extent) - def filter_labels(self, condition, dimension, context=UNSET): + def filter_labels(self, condition, dimension, context=UNSET) -> 'ProcessBuilder': """ Filter dimension labels based on a condition @@ -805,7 +805,7 @@ def filter_labels(self, condition, dimension, context=UNSET): """ return filter_labels(data=self, 
condition=condition, dimension=dimension, context=context) - def filter_spatial(self, geometries): + def filter_spatial(self, geometries) -> 'ProcessBuilder': """ Spatial filter using geometries @@ -818,7 +818,7 @@ def filter_spatial(self, geometries): """ return filter_spatial(data=self, geometries=geometries) - def filter_temporal(self, extent, dimension=UNSET): + def filter_temporal(self, extent, dimension=UNSET) -> 'ProcessBuilder': """ Temporal filter for a temporal intervals @@ -839,7 +839,7 @@ def filter_temporal(self, extent, dimension=UNSET): """ return filter_temporal(data=self, extent=extent, dimension=dimension) - def first(self, ignore_nodata=UNSET): + def first(self, ignore_nodata=UNSET) -> 'ProcessBuilder': """ First element @@ -852,7 +852,7 @@ def first(self, ignore_nodata=UNSET): """ return first(data=self, ignore_nodata=ignore_nodata) - def floor(self): + def floor(self) -> 'ProcessBuilder': """ Round fractions down @@ -862,7 +862,7 @@ def floor(self): """ return floor(x=self) - def gt(self, y): + def gt(self, y) -> 'ProcessBuilder': """ Greater than comparison @@ -874,7 +874,7 @@ def gt(self, y): """ return gt(x=self, y=y) - def gte(self, y): + def gte(self, y) -> 'ProcessBuilder': """ Greater than or equal to comparison @@ -886,7 +886,7 @@ def gte(self, y): """ return gte(x=self, y=y) - def if_(self, accept, reject=UNSET): + def if_(self, accept, reject=UNSET) -> 'ProcessBuilder': """ If-Then-Else conditional @@ -898,7 +898,7 @@ def if_(self, accept, reject=UNSET): """ return if_(value=self, accept=accept, reject=reject) - def int(self): + def int(self) -> 'ProcessBuilder': """ Integer part of a number @@ -908,7 +908,7 @@ def int(self): """ return int(x=self) - def is_nan(self): + def is_nan(self) -> 'ProcessBuilder': """ Value is not a number @@ -918,7 +918,7 @@ def is_nan(self): """ return is_nan(x=self) - def is_nodata(self): + def is_nodata(self) -> 'ProcessBuilder': """ Value is not a no-data value @@ -928,7 +928,7 @@ def is_nodata(self): 
""" return is_nodata(x=self) - def is_valid(self): + def is_valid(self) -> 'ProcessBuilder': """ Value is valid data @@ -938,7 +938,7 @@ def is_valid(self): """ return is_valid(x=self) - def last(self, ignore_nodata=UNSET): + def last(self, ignore_nodata=UNSET) -> 'ProcessBuilder': """ Last element @@ -951,7 +951,7 @@ def last(self, ignore_nodata=UNSET): """ return last(data=self, ignore_nodata=ignore_nodata) - def linear_scale_range(self, inputMin, inputMax, outputMin=UNSET, outputMax=UNSET): + def linear_scale_range(self, inputMin, inputMax, outputMin=UNSET, outputMax=UNSET) -> 'ProcessBuilder': """ Linear transformation between two ranges @@ -966,7 +966,7 @@ def linear_scale_range(self, inputMin, inputMax, outputMin=UNSET, outputMax=UNSE """ return linear_scale_range(x=self, inputMin=inputMin, inputMax=inputMax, outputMin=outputMin, outputMax=outputMax) - def ln(self): + def ln(self) -> 'ProcessBuilder': """ Natural logarithm @@ -976,7 +976,7 @@ def ln(self): """ return ln(x=self) - def load_collection(self, spatial_extent, temporal_extent, bands=UNSET, properties=UNSET): + def load_collection(self, spatial_extent, temporal_extent, bands=UNSET, properties=UNSET) -> 'ProcessBuilder': """ Load a collection @@ -1014,7 +1014,7 @@ def load_collection(self, spatial_extent, temporal_extent, bands=UNSET, properti """ return load_collection(id=self, spatial_extent=spatial_extent, temporal_extent=temporal_extent, bands=bands, properties=properties) - def load_result(self): + def load_result(self) -> 'ProcessBuilder': """ Load batch job results @@ -1024,7 +1024,7 @@ def load_result(self): """ return load_result(id=self) - def load_uploaded_files(self, format, options=UNSET): + def load_uploaded_files(self, format, options=UNSET) -> 'ProcessBuilder': """ Load files from the user workspace @@ -1042,7 +1042,7 @@ def load_uploaded_files(self, format, options=UNSET): """ return load_uploaded_files(paths=self, format=format, options=options) - def log(self, base): + def 
log(self, base) -> 'ProcessBuilder': """ Logarithm to a base @@ -1053,7 +1053,7 @@ def log(self, base): """ return log(x=self, base=base) - def lt(self, y): + def lt(self, y) -> 'ProcessBuilder': """ Less than comparison @@ -1064,7 +1064,7 @@ def lt(self, y): """ return lt(x=self, y=y) - def lte(self, y): + def lte(self, y) -> 'ProcessBuilder': """ Less than or equal to comparison @@ -1076,7 +1076,7 @@ def lte(self, y): """ return lte(x=self, y=y) - def mask(self, mask, replacement=UNSET): + def mask(self, mask, replacement=UNSET) -> 'ProcessBuilder': """ Apply a raster mask @@ -1090,7 +1090,7 @@ def mask(self, mask, replacement=UNSET): """ return mask(data=self, mask=mask, replacement=replacement) - def mask_polygon(self, mask, replacement=UNSET, inside=UNSET): + def mask_polygon(self, mask, replacement=UNSET, inside=UNSET) -> 'ProcessBuilder': """ Apply a polygon mask @@ -1107,7 +1107,7 @@ def mask_polygon(self, mask, replacement=UNSET, inside=UNSET): """ return mask_polygon(data=self, mask=mask, replacement=replacement, inside=inside) - def max(self, ignore_nodata=UNSET): + def max(self, ignore_nodata=UNSET) -> 'ProcessBuilder': """ Maximum value @@ -1120,7 +1120,7 @@ def max(self, ignore_nodata=UNSET): """ return max(data=self, ignore_nodata=ignore_nodata) - def mean(self, ignore_nodata=UNSET): + def mean(self, ignore_nodata=UNSET) -> 'ProcessBuilder': """ Arithmetic mean (average) @@ -1133,7 +1133,7 @@ def mean(self, ignore_nodata=UNSET): """ return mean(data=self, ignore_nodata=ignore_nodata) - def median(self, ignore_nodata=UNSET): + def median(self, ignore_nodata=UNSET) -> 'ProcessBuilder': """ Statistical median @@ -1146,7 +1146,7 @@ def median(self, ignore_nodata=UNSET): """ return median(data=self, ignore_nodata=ignore_nodata) - def merge_cubes(self, cube2, overlap_resolver=UNSET, context=UNSET): + def merge_cubes(self, cube2, overlap_resolver=UNSET, context=UNSET) -> 'ProcessBuilder': """ Merging two data cubes @@ -1163,7 +1163,7 @@ def 
merge_cubes(self, cube2, overlap_resolver=UNSET, context=UNSET): """ return merge_cubes(cube1=self, cube2=cube2, overlap_resolver=overlap_resolver, context=context) - def min(self, ignore_nodata=UNSET): + def min(self, ignore_nodata=UNSET) -> 'ProcessBuilder': """ Minimum value @@ -1176,7 +1176,7 @@ def min(self, ignore_nodata=UNSET): """ return min(data=self, ignore_nodata=ignore_nodata) - def mod(self, y): + def mod(self, y) -> 'ProcessBuilder': """ Modulo @@ -1187,7 +1187,7 @@ def mod(self, y): """ return mod(x=self, y=y) - def multiply(self, y): + def multiply(self, y) -> 'ProcessBuilder': """ Multiplication of two numbers @@ -1198,7 +1198,7 @@ def multiply(self, y): """ return multiply(x=self, y=y) - def ndvi(self, nir=UNSET, red=UNSET, target_band=UNSET): + def ndvi(self, nir=UNSET, red=UNSET, target_band=UNSET) -> 'ProcessBuilder': """ Normalized Difference Vegetation Index @@ -1225,7 +1225,7 @@ def ndvi(self, nir=UNSET, red=UNSET, target_band=UNSET): """ return ndvi(data=self, nir=nir, red=red, target_band=target_band) - def neq(self, y, delta=UNSET, case_sensitive=UNSET): + def neq(self, y, delta=UNSET, case_sensitive=UNSET) -> 'ProcessBuilder': """ Not equal to comparison @@ -1243,7 +1243,7 @@ def neq(self, y, delta=UNSET, case_sensitive=UNSET): """ return neq(x=self, y=y, delta=delta, case_sensitive=case_sensitive) - def normalized_difference(self, y): + def normalized_difference(self, y) -> 'ProcessBuilder': """ Normalized difference @@ -1254,7 +1254,7 @@ def normalized_difference(self, y): """ return normalized_difference(x=self, y=y) - def not_(self): + def not_(self) -> 'ProcessBuilder': """ Inverting a boolean @@ -1264,7 +1264,7 @@ def not_(self): """ return not_(x=self) - def or_(self, y): + def or_(self, y) -> 'ProcessBuilder': """ Logical OR @@ -1275,7 +1275,7 @@ def or_(self, y): """ return or_(x=self, y=y) - def order(self, asc=UNSET, nodata=UNSET): + def order(self, asc=UNSET, nodata=UNSET) -> 'ProcessBuilder': """ Create a permutation @@ 
-1289,7 +1289,7 @@ def order(self, asc=UNSET, nodata=UNSET): """ return order(data=self, asc=asc, nodata=nodata) - def pi(self): + def pi(self) -> 'ProcessBuilder': """ Pi (π) @@ -1297,7 +1297,7 @@ def pi(self): """ return pi() - def power(self, p): + def power(self, p) -> 'ProcessBuilder': """ Exponentiation @@ -1308,7 +1308,7 @@ def power(self, p): """ return power(base=self, p=p) - def product(self, ignore_nodata=UNSET): + def product(self, ignore_nodata=UNSET) -> 'ProcessBuilder': """ Compute the product by multiplying numbers @@ -1321,7 +1321,7 @@ def product(self, ignore_nodata=UNSET): """ return product(data=self, ignore_nodata=ignore_nodata) - def quantiles(self, probabilities=UNSET, q=UNSET, ignore_nodata=UNSET): + def quantiles(self, probabilities=UNSET, q=UNSET, ignore_nodata=UNSET) -> 'ProcessBuilder': """ Quantiles @@ -1341,7 +1341,7 @@ def quantiles(self, probabilities=UNSET, q=UNSET, ignore_nodata=UNSET): """ return quantiles(data=self, probabilities=probabilities, q=q, ignore_nodata=ignore_nodata) - def rearrange(self, order): + def rearrange(self, order) -> 'ProcessBuilder': """ Rearrange an array based on a permutation @@ -1352,7 +1352,7 @@ def rearrange(self, order): """ return rearrange(data=self, order=order) - def reduce_dimension(self, reducer, dimension, context=UNSET): + def reduce_dimension(self, reducer, dimension, context=UNSET) -> 'ProcessBuilder': """ Reduce dimensions @@ -1370,7 +1370,7 @@ def reduce_dimension(self, reducer, dimension, context=UNSET): """ return reduce_dimension(data=self, reducer=reducer, dimension=dimension, context=context) - def reduce_dimension_binary(self, reducer, dimension, context=UNSET): + def reduce_dimension_binary(self, reducer, dimension, context=UNSET) -> 'ProcessBuilder': """ Reduce dimensions using binary reduction @@ -1389,7 +1389,7 @@ def reduce_dimension_binary(self, reducer, dimension, context=UNSET): """ return reduce_dimension_binary(data=self, reducer=reducer, dimension=dimension, 
context=context) - def rename_dimension(self, source, target): + def rename_dimension(self, source, target) -> 'ProcessBuilder': """ Rename a dimension @@ -1405,7 +1405,7 @@ def rename_dimension(self, source, target): """ return rename_dimension(data=self, source=source, target=target) - def rename_labels(self, dimension, target, source=UNSET): + def rename_labels(self, dimension, target, source=UNSET) -> 'ProcessBuilder': """ Rename dimension labels @@ -1426,7 +1426,7 @@ def rename_labels(self, dimension, target, source=UNSET): """ return rename_labels(data=self, dimension=dimension, target=target, source=source) - def resample_cube_spatial(self, target, method=UNSET): + def resample_cube_spatial(self, target, method=UNSET) -> 'ProcessBuilder': """ Resample the spatial dimensions to match a target data cube @@ -1441,7 +1441,7 @@ def resample_cube_spatial(self, target, method=UNSET): """ return resample_cube_spatial(data=self, target=target, method=method) - def resample_cube_temporal(self, target, method, dimension=UNSET, context=UNSET): + def resample_cube_temporal(self, target, method, dimension=UNSET, context=UNSET) -> 'ProcessBuilder': """ Resample a temporal dimension to match a target data cube @@ -1463,7 +1463,7 @@ def resample_cube_temporal(self, target, method, dimension=UNSET, context=UNSET) """ return resample_cube_temporal(data=self, target=target, method=method, dimension=dimension, context=context) - def resample_spatial(self, resolution=UNSET, projection=UNSET, method=UNSET, align=UNSET): + def resample_spatial(self, resolution=UNSET, projection=UNSET, method=UNSET, align=UNSET) -> 'ProcessBuilder': """ Resample and warp the spatial dimensions @@ -1487,7 +1487,7 @@ def resample_spatial(self, resolution=UNSET, projection=UNSET, method=UNSET, ali """ return resample_spatial(data=self, resolution=resolution, projection=projection, method=method, align=align) - def round(self, p=UNSET): + def round(self, p=UNSET) -> 'ProcessBuilder': """ Round to a 
specified precision @@ -1500,7 +1500,7 @@ def round(self, p=UNSET): """ return round(x=self, p=p) - def run_udf(self, udf, runtime, version=UNSET, context=UNSET): + def run_udf(self, udf, runtime, version=UNSET, context=UNSET) -> 'ProcessBuilder': """ Run an UDF @@ -1518,7 +1518,7 @@ def run_udf(self, udf, runtime, version=UNSET, context=UNSET): """ return run_udf(data=self, udf=udf, runtime=runtime, version=version, context=context) - def run_udf_externally(self, url, context=UNSET): + def run_udf_externally(self, url, context=UNSET) -> 'ProcessBuilder': """ Run an externally hosted UDF container @@ -1533,7 +1533,7 @@ def run_udf_externally(self, url, context=UNSET): """ return run_udf_externally(data=self, url=url, context=context) - def save_result(self, format, options=UNSET): + def save_result(self, format, options=UNSET) -> 'ProcessBuilder': """ Save processed data to storage @@ -1550,7 +1550,7 @@ def save_result(self, format, options=UNSET): """ return save_result(data=self, format=format, options=options) - def sd(self, ignore_nodata=UNSET): + def sd(self, ignore_nodata=UNSET) -> 'ProcessBuilder': """ Standard deviation @@ -1563,7 +1563,7 @@ def sd(self, ignore_nodata=UNSET): """ return sd(data=self, ignore_nodata=ignore_nodata) - def sgn(self): + def sgn(self) -> 'ProcessBuilder': """ Signum @@ -1573,7 +1573,7 @@ def sgn(self): """ return sgn(x=self) - def sin(self): + def sin(self) -> 'ProcessBuilder': """ Sine @@ -1583,7 +1583,7 @@ def sin(self): """ return sin(x=self) - def sinh(self): + def sinh(self) -> 'ProcessBuilder': """ Hyperbolic sine @@ -1593,7 +1593,7 @@ def sinh(self): """ return sinh(x=self) - def sort(self, asc=UNSET, nodata=UNSET): + def sort(self, asc=UNSET, nodata=UNSET) -> 'ProcessBuilder': """ Sort data @@ -1607,7 +1607,7 @@ def sort(self, asc=UNSET, nodata=UNSET): """ return sort(data=self, asc=asc, nodata=nodata) - def sqrt(self): + def sqrt(self) -> 'ProcessBuilder': """ Square root @@ -1617,7 +1617,7 @@ def sqrt(self): """ return 
sqrt(x=self) - def subtract(self, y): + def subtract(self, y) -> 'ProcessBuilder': """ Subtraction of two numbers @@ -1628,7 +1628,7 @@ def subtract(self, y): """ return subtract(x=self, y=y) - def sum(self, ignore_nodata=UNSET): + def sum(self, ignore_nodata=UNSET) -> 'ProcessBuilder': """ Compute the sum by adding up numbers @@ -1641,7 +1641,7 @@ def sum(self, ignore_nodata=UNSET): """ return sum(data=self, ignore_nodata=ignore_nodata) - def tan(self): + def tan(self) -> 'ProcessBuilder': """ Tangent @@ -1651,7 +1651,7 @@ def tan(self): """ return tan(x=self) - def tanh(self): + def tanh(self) -> 'ProcessBuilder': """ Hyperbolic tangent @@ -1661,7 +1661,7 @@ def tanh(self): """ return tanh(x=self) - def text_begins(self, pattern, case_sensitive=UNSET): + def text_begins(self, pattern, case_sensitive=UNSET) -> 'ProcessBuilder': """ Text begins with another text @@ -1673,7 +1673,7 @@ def text_begins(self, pattern, case_sensitive=UNSET): """ return text_begins(data=self, pattern=pattern, case_sensitive=case_sensitive) - def text_contains(self, pattern, case_sensitive=UNSET): + def text_contains(self, pattern, case_sensitive=UNSET) -> 'ProcessBuilder': """ Text contains another text @@ -1685,7 +1685,7 @@ def text_contains(self, pattern, case_sensitive=UNSET): """ return text_contains(data=self, pattern=pattern, case_sensitive=case_sensitive) - def text_ends(self, pattern, case_sensitive=UNSET): + def text_ends(self, pattern, case_sensitive=UNSET) -> 'ProcessBuilder': """ Text ends with another text @@ -1697,7 +1697,7 @@ def text_ends(self, pattern, case_sensitive=UNSET): """ return text_ends(data=self, pattern=pattern, case_sensitive=case_sensitive) - def text_merge(self, separator=UNSET): + def text_merge(self, separator=UNSET) -> 'ProcessBuilder': """ Concatenate elements to a string @@ -1711,7 +1711,7 @@ def text_merge(self, separator=UNSET): """ return text_merge(data=self, separator=separator) - def trim_cube(self): + def trim_cube(self) -> 'ProcessBuilder': """ 
Remove dimension labels with no-data values @@ -1722,7 +1722,7 @@ def trim_cube(self): """ return trim_cube(data=self) - def variance(self, ignore_nodata=UNSET): + def variance(self, ignore_nodata=UNSET) -> 'ProcessBuilder': """ Variance @@ -1735,7 +1735,7 @@ def variance(self, ignore_nodata=UNSET): """ return variance(data=self, ignore_nodata=ignore_nodata) - def xor(self, y): + def xor(self, y) -> 'ProcessBuilder': """ Logical XOR (exclusive or) @@ -1751,7 +1751,7 @@ def xor(self, y): process = ProcessBuilder.process -def absolute(x): +def absolute(x) -> ProcessBuilder: """ Absolute value @@ -1762,7 +1762,7 @@ def absolute(x): return process('absolute', x=x) -def add(x, y): +def add(x, y) -> ProcessBuilder: """ Addition of two numbers @@ -1774,7 +1774,7 @@ def add(x, y): return process('add', x=x, y=y) -def add_dimension(data, name, label, type=UNSET): +def add_dimension(data, name, label, type=UNSET) -> ProcessBuilder: """ Add a new dimension @@ -1789,7 +1789,7 @@ def add_dimension(data, name, label, type=UNSET): return process('add_dimension', data=data, name=name, label=label, type=type) -def aggregate_spatial(data, geometries, reducer, target_dimension=UNSET, context=UNSET): +def aggregate_spatial(data, geometries, reducer, target_dimension=UNSET, context=UNSET) -> ProcessBuilder: """ Zonal statistics for geometries @@ -1813,7 +1813,7 @@ def aggregate_spatial(data, geometries, reducer, target_dimension=UNSET, context return process('aggregate_spatial', data=data, geometries=geometries, reducer=reducer, target_dimension=target_dimension, context=context) -def aggregate_spatial_binary(data, geometries, reducer, target_dimension=UNSET, context=UNSET): +def aggregate_spatial_binary(data, geometries, reducer, target_dimension=UNSET, context=UNSET) -> ProcessBuilder: """ Zonal statistics for geometries by binary aggregation @@ -1838,7 +1838,7 @@ def aggregate_spatial_binary(data, geometries, reducer, target_dimension=UNSET, return 
process('aggregate_spatial_binary', data=data, geometries=geometries, reducer=reducer, target_dimension=target_dimension, context=context) -def aggregate_temporal(data, intervals, reducer, labels=UNSET, dimension=UNSET, context=UNSET): +def aggregate_temporal(data, intervals, reducer, labels=UNSET, dimension=UNSET, context=UNSET) -> ProcessBuilder: """ Temporal aggregations @@ -1870,7 +1870,7 @@ def aggregate_temporal(data, intervals, reducer, labels=UNSET, dimension=UNSET, return process('aggregate_temporal', data=data, intervals=intervals, reducer=reducer, labels=labels, dimension=dimension, context=context) -def aggregate_temporal_period(data, period, reducer, dimension=UNSET, context=UNSET): +def aggregate_temporal_period(data, period, reducer, dimension=UNSET, context=UNSET) -> ProcessBuilder: """ Temporal aggregations based on calendar hierarchies @@ -1907,7 +1907,7 @@ def aggregate_temporal_period(data, period, reducer, dimension=UNSET, context=UN return process('aggregate_temporal_period', data=data, period=period, reducer=reducer, dimension=dimension, context=context) -def all(data, ignore_nodata=UNSET): +def all(data, ignore_nodata=UNSET) -> ProcessBuilder: """ Are all of the values true? @@ -1919,7 +1919,7 @@ def all(data, ignore_nodata=UNSET): return process('all', data=data, ignore_nodata=ignore_nodata) -def and_(x, y): +def and_(x, y) -> ProcessBuilder: """ Logical AND @@ -1931,7 +1931,7 @@ def and_(x, y): return process('and_', x=x, y=y) -def anomaly(data, normals, period): +def anomaly(data, normals, period) -> ProcessBuilder: """ Computes anomalies @@ -1970,7 +1970,7 @@ def anomaly(data, normals, period): return process('anomaly', data=data, normals=normals, period=period) -def any(data, ignore_nodata=UNSET): +def any(data, ignore_nodata=UNSET) -> ProcessBuilder: """ Is at least one value true? 
@@ -1982,7 +1982,7 @@ def any(data, ignore_nodata=UNSET): return process('any', data=data, ignore_nodata=ignore_nodata) -def apply(data, process, context=UNSET): +def apply(data, process, context=UNSET) -> ProcessBuilder: """ Apply a process to each pixel @@ -1996,7 +1996,7 @@ def apply(data, process, context=UNSET): return process('apply', data=data, process=process, context=context) -def apply_dimension(data, process, dimension, target_dimension=UNSET, context=UNSET): +def apply_dimension(data, process, dimension, target_dimension=UNSET, context=UNSET) -> ProcessBuilder: """ Apply a process to pixels along a dimension @@ -2033,7 +2033,7 @@ def apply_dimension(data, process, dimension, target_dimension=UNSET, context=UN return process('apply_dimension', data=data, process=process, dimension=dimension, target_dimension=target_dimension, context=context) -def apply_kernel(data, kernel, factor=UNSET, border=UNSET, replace_invalid=UNSET): +def apply_kernel(data, kernel, factor=UNSET, border=UNSET, replace_invalid=UNSET) -> ProcessBuilder: """ Apply a spatial convolution with a kernel @@ -2059,7 +2059,7 @@ def apply_kernel(data, kernel, factor=UNSET, border=UNSET, replace_invalid=UNSET return process('apply_kernel', data=data, kernel=kernel, factor=factor, border=border, replace_invalid=replace_invalid) -def apply_neighborhood(data, process, size, overlap=UNSET, context=UNSET): +def apply_neighborhood(data, process, size, overlap=UNSET, context=UNSET) -> ProcessBuilder: """ Apply a process to pixels in a n-dimensional neighbourhood @@ -2082,7 +2082,7 @@ def apply_neighborhood(data, process, size, overlap=UNSET, context=UNSET): return process('apply_neighborhood', data=data, process=process, size=size, overlap=overlap, context=context) -def arccos(x): +def arccos(x) -> ProcessBuilder: """ Inverse cosine @@ -2093,7 +2093,7 @@ def arccos(x): return process('arccos', x=x) -def arcosh(x): +def arcosh(x) -> ProcessBuilder: """ Inverse hyperbolic cosine @@ -2104,7 +2104,7 @@ 
def arcosh(x): return process('arcosh', x=x) -def arcsin(x): +def arcsin(x) -> ProcessBuilder: """ Inverse sine @@ -2115,7 +2115,7 @@ def arcsin(x): return process('arcsin', x=x) -def arctan(x): +def arctan(x) -> ProcessBuilder: """ Inverse tangent @@ -2126,7 +2126,7 @@ def arctan(x): return process('arctan', x=x) -def arctan2(y, x): +def arctan2(y, x) -> ProcessBuilder: """ Inverse tangent of two numbers @@ -2138,7 +2138,7 @@ def arctan2(y, x): return process('arctan2', y=y, x=x) -def array_apply(data, process, context=UNSET): +def array_apply(data, process, context=UNSET) -> ProcessBuilder: """ Applies a unary process to each array element @@ -2153,7 +2153,7 @@ def array_apply(data, process, context=UNSET): return process('array_apply', data=data, process=process, context=context) -def array_contains(data, value): +def array_contains(data, value) -> ProcessBuilder: """ Check whether the array contains a given value @@ -2165,7 +2165,7 @@ def array_contains(data, value): return process('array_contains', data=data, value=value) -def array_element(data, index=UNSET, label=UNSET, return_nodata=UNSET): +def array_element(data, index=UNSET, label=UNSET, return_nodata=UNSET) -> ProcessBuilder: """ Get an element from an array @@ -2180,7 +2180,7 @@ def array_element(data, index=UNSET, label=UNSET, return_nodata=UNSET): return process('array_element', data=data, index=index, label=label, return_nodata=return_nodata) -def array_filter(data, condition, context=UNSET): +def array_filter(data, condition, context=UNSET) -> ProcessBuilder: """ Filter an array based on a condition @@ -2195,7 +2195,7 @@ def array_filter(data, condition, context=UNSET): return process('array_filter', data=data, condition=condition, context=context) -def array_find(data, value): +def array_find(data, value) -> ProcessBuilder: """ Get the index for a value in an array @@ -2208,7 +2208,7 @@ def array_find(data, value): return process('array_find', data=data, value=value) -def array_labels(data): +def 
array_labels(data) -> ProcessBuilder: """ Get the labels for an array @@ -2219,7 +2219,7 @@ def array_labels(data): return process('array_labels', data=data) -def arsinh(x): +def arsinh(x) -> ProcessBuilder: """ Inverse hyperbolic sine @@ -2230,7 +2230,7 @@ def arsinh(x): return process('arsinh', x=x) -def artanh(x): +def artanh(x) -> ProcessBuilder: """ Inverse hyperbolic tangent @@ -2241,7 +2241,7 @@ def artanh(x): return process('artanh', x=x) -def between(x, min, max, exclude_max=UNSET): +def between(x, min, max, exclude_max=UNSET) -> ProcessBuilder: """ Between comparison @@ -2255,7 +2255,7 @@ def between(x, min, max, exclude_max=UNSET): return process('between', x=x, min=min, max=max, exclude_max=exclude_max) -def ceil(x): +def ceil(x) -> ProcessBuilder: """ Round fractions up @@ -2266,7 +2266,7 @@ def ceil(x): return process('ceil', x=x) -def climatological_normal(data, period, climatology_period=UNSET): +def climatological_normal(data, period, climatology_period=UNSET) -> ProcessBuilder: """ Computes climatology normals @@ -2295,7 +2295,7 @@ def climatological_normal(data, period, climatology_period=UNSET): return process('climatological_normal', data=data, period=period, climatology_period=climatology_period) -def clip(x, min, max): +def clip(x, min, max) -> ProcessBuilder: """ Clip a value between a minimum and a maximum @@ -2310,7 +2310,7 @@ def clip(x, min, max): return process('clip', x=x, min=min, max=max) -def constant(x): +def constant(x) -> ProcessBuilder: """ Define a constant value @@ -2321,7 +2321,7 @@ def constant(x): return process('constant', x=x) -def cos(x): +def cos(x) -> ProcessBuilder: """ Cosine @@ -2332,7 +2332,7 @@ def cos(x): return process('cos', x=x) -def cosh(x): +def cosh(x) -> ProcessBuilder: """ Hyperbolic cosine @@ -2343,7 +2343,7 @@ def cosh(x): return process('cosh', x=x) -def count(data, condition=UNSET, context=UNSET): +def count(data, condition=UNSET, context=UNSET) -> ProcessBuilder: """ Count the number of elements @@ 
-2359,7 +2359,7 @@ def count(data, condition=UNSET, context=UNSET): return process('count', data=data, condition=condition, context=context) -def create_raster_cube(): +def create_raster_cube() -> ProcessBuilder: """ Create an empty raster data cube @@ -2368,7 +2368,7 @@ def create_raster_cube(): return process('create_raster_cube', ) -def cummax(data, ignore_nodata=UNSET): +def cummax(data, ignore_nodata=UNSET) -> ProcessBuilder: """ Cumulative maxima @@ -2381,7 +2381,7 @@ def cummax(data, ignore_nodata=UNSET): return process('cummax', data=data, ignore_nodata=ignore_nodata) -def cummin(data, ignore_nodata=UNSET): +def cummin(data, ignore_nodata=UNSET) -> ProcessBuilder: """ Cumulative minima @@ -2394,7 +2394,7 @@ def cummin(data, ignore_nodata=UNSET): return process('cummin', data=data, ignore_nodata=ignore_nodata) -def cumproduct(data, ignore_nodata=UNSET): +def cumproduct(data, ignore_nodata=UNSET) -> ProcessBuilder: """ Cumulative products @@ -2407,7 +2407,7 @@ def cumproduct(data, ignore_nodata=UNSET): return process('cumproduct', data=data, ignore_nodata=ignore_nodata) -def cumsum(data, ignore_nodata=UNSET): +def cumsum(data, ignore_nodata=UNSET) -> ProcessBuilder: """ Cumulative sums @@ -2420,7 +2420,7 @@ def cumsum(data, ignore_nodata=UNSET): return process('cumsum', data=data, ignore_nodata=ignore_nodata) -def debug(data, code=UNSET, level=UNSET, message=UNSET): +def debug(data, code=UNSET, level=UNSET, message=UNSET) -> ProcessBuilder: """ Publish debugging information @@ -2435,7 +2435,7 @@ def debug(data, code=UNSET, level=UNSET, message=UNSET): return process('debug', data=data, code=code, level=level, message=message) -def dimension_labels(data, dimension): +def dimension_labels(data, dimension) -> ProcessBuilder: """ Get the dimension labels @@ -2447,7 +2447,7 @@ def dimension_labels(data, dimension): return process('dimension_labels', data=data, dimension=dimension) -def divide(x, y): +def divide(x, y) -> ProcessBuilder: """ Division of two numbers 
@@ -2459,7 +2459,7 @@ def divide(x, y): return process('divide', x=x, y=y) -def drop_dimension(data, name): +def drop_dimension(data, name) -> ProcessBuilder: """ Remove a dimension @@ -2473,7 +2473,7 @@ def drop_dimension(data, name): return process('drop_dimension', data=data, name=name) -def e(): +def e() -> ProcessBuilder: """ Euler's number (e) @@ -2482,7 +2482,7 @@ def e(): return process('e', ) -def eq(x, y, delta=UNSET, case_sensitive=UNSET): +def eq(x, y, delta=UNSET, case_sensitive=UNSET) -> ProcessBuilder: """ Equal to comparison @@ -2500,7 +2500,7 @@ def eq(x, y, delta=UNSET, case_sensitive=UNSET): return process('eq', x=x, y=y, delta=delta, case_sensitive=case_sensitive) -def exp(p): +def exp(p) -> ProcessBuilder: """ Exponentiation to the base e @@ -2511,7 +2511,7 @@ def exp(p): return process('exp', p=p) -def extrema(data, ignore_nodata=UNSET): +def extrema(data, ignore_nodata=UNSET) -> ProcessBuilder: """ Minimum and maximum values @@ -2527,7 +2527,7 @@ def extrema(data, ignore_nodata=UNSET): return process('extrema', data=data, ignore_nodata=ignore_nodata) -def filter_bands(data, bands=UNSET, wavelengths=UNSET): +def filter_bands(data, bands=UNSET, wavelengths=UNSET) -> ProcessBuilder: """ Filter the bands by name @@ -2549,7 +2549,7 @@ def filter_bands(data, bands=UNSET, wavelengths=UNSET): return process('filter_bands', data=data, bands=bands, wavelengths=wavelengths) -def filter_bbox(data, extent): +def filter_bbox(data, extent) -> ProcessBuilder: """ Spatial filter using a bounding box @@ -2563,7 +2563,7 @@ def filter_bbox(data, extent): return process('filter_bbox', data=data, extent=extent) -def filter_labels(data, condition, dimension, context=UNSET): +def filter_labels(data, condition, dimension, context=UNSET) -> ProcessBuilder: """ Filter dimension labels based on a condition @@ -2582,7 +2582,7 @@ def filter_labels(data, condition, dimension, context=UNSET): return process('filter_labels', data=data, condition=condition, 
dimension=dimension, context=context) -def filter_spatial(data, geometries): +def filter_spatial(data, geometries) -> ProcessBuilder: """ Spatial filter using geometries @@ -2596,7 +2596,7 @@ def filter_spatial(data, geometries): return process('filter_spatial', data=data, geometries=geometries) -def filter_temporal(data, extent, dimension=UNSET): +def filter_temporal(data, extent, dimension=UNSET) -> ProcessBuilder: """ Temporal filter for a temporal intervals @@ -2618,7 +2618,7 @@ def filter_temporal(data, extent, dimension=UNSET): return process('filter_temporal', data=data, extent=extent, dimension=dimension) -def first(data, ignore_nodata=UNSET): +def first(data, ignore_nodata=UNSET) -> ProcessBuilder: """ First element @@ -2632,7 +2632,7 @@ def first(data, ignore_nodata=UNSET): return process('first', data=data, ignore_nodata=ignore_nodata) -def floor(x): +def floor(x) -> ProcessBuilder: """ Round fractions down @@ -2643,7 +2643,7 @@ def floor(x): return process('floor', x=x) -def gt(x, y): +def gt(x, y) -> ProcessBuilder: """ Greater than comparison @@ -2655,7 +2655,7 @@ def gt(x, y): return process('gt', x=x, y=y) -def gte(x, y): +def gte(x, y) -> ProcessBuilder: """ Greater than or equal to comparison @@ -2667,7 +2667,7 @@ def gte(x, y): return process('gte', x=x, y=y) -def if_(value, accept, reject=UNSET): +def if_(value, accept, reject=UNSET) -> ProcessBuilder: """ If-Then-Else conditional @@ -2680,7 +2680,7 @@ def if_(value, accept, reject=UNSET): return process('if_', value=value, accept=accept, reject=reject) -def int(x): +def int(x) -> ProcessBuilder: """ Integer part of a number @@ -2691,7 +2691,7 @@ def int(x): return process('int', x=x) -def is_nan(x): +def is_nan(x) -> ProcessBuilder: """ Value is not a number @@ -2702,7 +2702,7 @@ def is_nan(x): return process('is_nan', x=x) -def is_nodata(x): +def is_nodata(x) -> ProcessBuilder: """ Value is not a no-data value @@ -2713,7 +2713,7 @@ def is_nodata(x): return process('is_nodata', x=x) -def 
is_valid(x): +def is_valid(x) -> ProcessBuilder: """ Value is valid data @@ -2724,7 +2724,7 @@ def is_valid(x): return process('is_valid', x=x) -def last(data, ignore_nodata=UNSET): +def last(data, ignore_nodata=UNSET) -> ProcessBuilder: """ Last element @@ -2737,7 +2737,7 @@ def last(data, ignore_nodata=UNSET): return process('last', data=data, ignore_nodata=ignore_nodata) -def linear_scale_range(x, inputMin, inputMax, outputMin=UNSET, outputMax=UNSET): +def linear_scale_range(x, inputMin, inputMax, outputMin=UNSET, outputMax=UNSET) -> ProcessBuilder: """ Linear transformation between two ranges @@ -2753,7 +2753,7 @@ def linear_scale_range(x, inputMin, inputMax, outputMin=UNSET, outputMax=UNSET): return process('linear_scale_range', x=x, inputMin=inputMin, inputMax=inputMax, outputMin=outputMin, outputMax=outputMax) -def ln(x): +def ln(x) -> ProcessBuilder: """ Natural logarithm @@ -2764,7 +2764,7 @@ def ln(x): return process('ln', x=x) -def load_collection(id, spatial_extent, temporal_extent, bands=UNSET, properties=UNSET): +def load_collection(id, spatial_extent, temporal_extent, bands=UNSET, properties=UNSET) -> ProcessBuilder: """ Load a collection @@ -2802,7 +2802,7 @@ def load_collection(id, spatial_extent, temporal_extent, bands=UNSET, properties return process('load_collection', id=id, spatial_extent=spatial_extent, temporal_extent=temporal_extent, bands=bands, properties=properties) -def load_result(id): +def load_result(id) -> ProcessBuilder: """ Load batch job results @@ -2813,7 +2813,7 @@ def load_result(id): return process('load_result', id=id) -def load_uploaded_files(paths, format, options=UNSET): +def load_uploaded_files(paths, format, options=UNSET) -> ProcessBuilder: """ Load files from the user workspace @@ -2832,7 +2832,7 @@ def load_uploaded_files(paths, format, options=UNSET): return process('load_uploaded_files', paths=paths, format=format, options=options) -def log(x, base): +def log(x, base) -> ProcessBuilder: """ Logarithm to a base @@ 
-2844,7 +2844,7 @@ def log(x, base): return process('log', x=x, base=base) -def lt(x, y): +def lt(x, y) -> ProcessBuilder: """ Less than comparison @@ -2856,7 +2856,7 @@ def lt(x, y): return process('lt', x=x, y=y) -def lte(x, y): +def lte(x, y) -> ProcessBuilder: """ Less than or equal to comparison @@ -2868,7 +2868,7 @@ def lte(x, y): return process('lte', x=x, y=y) -def mask(data, mask, replacement=UNSET): +def mask(data, mask, replacement=UNSET) -> ProcessBuilder: """ Apply a raster mask @@ -2882,7 +2882,7 @@ def mask(data, mask, replacement=UNSET): return process('mask', data=data, mask=mask, replacement=replacement) -def mask_polygon(data, mask, replacement=UNSET, inside=UNSET): +def mask_polygon(data, mask, replacement=UNSET, inside=UNSET) -> ProcessBuilder: """ Apply a polygon mask @@ -2900,7 +2900,7 @@ def mask_polygon(data, mask, replacement=UNSET, inside=UNSET): return process('mask_polygon', data=data, mask=mask, replacement=replacement, inside=inside) -def max(data, ignore_nodata=UNSET): +def max(data, ignore_nodata=UNSET) -> ProcessBuilder: """ Maximum value @@ -2913,7 +2913,7 @@ def max(data, ignore_nodata=UNSET): return process('max', data=data, ignore_nodata=ignore_nodata) -def mean(data, ignore_nodata=UNSET): +def mean(data, ignore_nodata=UNSET) -> ProcessBuilder: """ Arithmetic mean (average) @@ -2926,7 +2926,7 @@ def mean(data, ignore_nodata=UNSET): return process('mean', data=data, ignore_nodata=ignore_nodata) -def median(data, ignore_nodata=UNSET): +def median(data, ignore_nodata=UNSET) -> ProcessBuilder: """ Statistical median @@ -2939,7 +2939,7 @@ def median(data, ignore_nodata=UNSET): return process('median', data=data, ignore_nodata=ignore_nodata) -def merge_cubes(cube1, cube2, overlap_resolver=UNSET, context=UNSET): +def merge_cubes(cube1, cube2, overlap_resolver=UNSET, context=UNSET) -> ProcessBuilder: """ Merging two data cubes @@ -2957,7 +2957,7 @@ def merge_cubes(cube1, cube2, overlap_resolver=UNSET, context=UNSET): return 
process('merge_cubes', cube1=cube1, cube2=cube2, overlap_resolver=overlap_resolver, context=context) -def min(data, ignore_nodata=UNSET): +def min(data, ignore_nodata=UNSET) -> ProcessBuilder: """ Minimum value @@ -2970,7 +2970,7 @@ def min(data, ignore_nodata=UNSET): return process('min', data=data, ignore_nodata=ignore_nodata) -def mod(x, y): +def mod(x, y) -> ProcessBuilder: """ Modulo @@ -2982,7 +2982,7 @@ def mod(x, y): return process('mod', x=x, y=y) -def multiply(x, y): +def multiply(x, y) -> ProcessBuilder: """ Multiplication of two numbers @@ -2994,7 +2994,7 @@ def multiply(x, y): return process('multiply', x=x, y=y) -def ndvi(data, nir=UNSET, red=UNSET, target_band=UNSET): +def ndvi(data, nir=UNSET, red=UNSET, target_band=UNSET) -> ProcessBuilder: """ Normalized Difference Vegetation Index @@ -3022,7 +3022,7 @@ def ndvi(data, nir=UNSET, red=UNSET, target_band=UNSET): return process('ndvi', data=data, nir=nir, red=red, target_band=target_band) -def neq(x, y, delta=UNSET, case_sensitive=UNSET): +def neq(x, y, delta=UNSET, case_sensitive=UNSET) -> ProcessBuilder: """ Not equal to comparison @@ -3040,7 +3040,7 @@ def neq(x, y, delta=UNSET, case_sensitive=UNSET): return process('neq', x=x, y=y, delta=delta, case_sensitive=case_sensitive) -def normalized_difference(x, y): +def normalized_difference(x, y) -> ProcessBuilder: """ Normalized difference @@ -3052,7 +3052,7 @@ def normalized_difference(x, y): return process('normalized_difference', x=x, y=y) -def not_(x): +def not_(x) -> ProcessBuilder: """ Inverting a boolean @@ -3063,7 +3063,7 @@ def not_(x): return process('not_', x=x) -def or_(x, y): +def or_(x, y) -> ProcessBuilder: """ Logical OR @@ -3075,7 +3075,7 @@ def or_(x, y): return process('or_', x=x, y=y) -def order(data, asc=UNSET, nodata=UNSET): +def order(data, asc=UNSET, nodata=UNSET) -> ProcessBuilder: """ Create a permutation @@ -3090,7 +3090,7 @@ def order(data, asc=UNSET, nodata=UNSET): return process('order', data=data, asc=asc, nodata=nodata) 
-def pi(): +def pi() -> ProcessBuilder: """ Pi (π) @@ -3099,7 +3099,7 @@ def pi(): return process('pi', ) -def power(base, p): +def power(base, p) -> ProcessBuilder: """ Exponentiation @@ -3111,7 +3111,7 @@ def power(base, p): return process('power', base=base, p=p) -def product(data, ignore_nodata=UNSET): +def product(data, ignore_nodata=UNSET) -> ProcessBuilder: """ Compute the product by multiplying numbers @@ -3124,7 +3124,7 @@ def product(data, ignore_nodata=UNSET): return process('product', data=data, ignore_nodata=ignore_nodata) -def quantiles(data, probabilities=UNSET, q=UNSET, ignore_nodata=UNSET): +def quantiles(data, probabilities=UNSET, q=UNSET, ignore_nodata=UNSET) -> ProcessBuilder: """ Quantiles @@ -3145,7 +3145,7 @@ def quantiles(data, probabilities=UNSET, q=UNSET, ignore_nodata=UNSET): return process('quantiles', data=data, probabilities=probabilities, q=q, ignore_nodata=ignore_nodata) -def rearrange(data, order): +def rearrange(data, order) -> ProcessBuilder: """ Rearrange an array based on a permutation @@ -3157,7 +3157,7 @@ def rearrange(data, order): return process('rearrange', data=data, order=order) -def reduce_dimension(data, reducer, dimension, context=UNSET): +def reduce_dimension(data, reducer, dimension, context=UNSET) -> ProcessBuilder: """ Reduce dimensions @@ -3176,7 +3176,7 @@ def reduce_dimension(data, reducer, dimension, context=UNSET): return process('reduce_dimension', data=data, reducer=reducer, dimension=dimension, context=context) -def reduce_dimension_binary(data, reducer, dimension, context=UNSET): +def reduce_dimension_binary(data, reducer, dimension, context=UNSET) -> ProcessBuilder: """ Reduce dimensions using binary reduction @@ -3196,7 +3196,7 @@ def reduce_dimension_binary(data, reducer, dimension, context=UNSET): return process('reduce_dimension_binary', data=data, reducer=reducer, dimension=dimension, context=context) -def rename_dimension(data, source, target): +def rename_dimension(data, source, target) -> 
ProcessBuilder: """ Rename a dimension @@ -3213,7 +3213,7 @@ def rename_dimension(data, source, target): return process('rename_dimension', data=data, source=source, target=target) -def rename_labels(data, dimension, target, source=UNSET): +def rename_labels(data, dimension, target, source=UNSET) -> ProcessBuilder: """ Rename dimension labels @@ -3235,7 +3235,7 @@ def rename_labels(data, dimension, target, source=UNSET): return process('rename_labels', data=data, dimension=dimension, target=target, source=source) -def resample_cube_spatial(data, target, method=UNSET): +def resample_cube_spatial(data, target, method=UNSET) -> ProcessBuilder: """ Resample the spatial dimensions to match a target data cube @@ -3251,7 +3251,7 @@ def resample_cube_spatial(data, target, method=UNSET): return process('resample_cube_spatial', data=data, target=target, method=method) -def resample_cube_temporal(data, target, method, dimension=UNSET, context=UNSET): +def resample_cube_temporal(data, target, method, dimension=UNSET, context=UNSET) -> ProcessBuilder: """ Resample a temporal dimension to match a target data cube @@ -3273,7 +3273,7 @@ def resample_cube_temporal(data, target, method, dimension=UNSET, context=UNSET) return process('resample_cube_temporal', data=data, target=target, method=method, dimension=dimension, context=context) -def resample_spatial(data, resolution=UNSET, projection=UNSET, method=UNSET, align=UNSET): +def resample_spatial(data, resolution=UNSET, projection=UNSET, method=UNSET, align=UNSET) -> ProcessBuilder: """ Resample and warp the spatial dimensions @@ -3297,7 +3297,7 @@ def resample_spatial(data, resolution=UNSET, projection=UNSET, method=UNSET, ali return process('resample_spatial', data=data, resolution=resolution, projection=projection, method=method, align=align) -def round(x, p=UNSET): +def round(x, p=UNSET) -> ProcessBuilder: """ Round to a specified precision @@ -3311,7 +3311,7 @@ def round(x, p=UNSET): return process('round', x=x, p=p) -def 
run_udf(data, udf, runtime, version=UNSET, context=UNSET): +def run_udf(data, udf, runtime, version=UNSET, context=UNSET) -> ProcessBuilder: """ Run an UDF @@ -3330,7 +3330,7 @@ def run_udf(data, udf, runtime, version=UNSET, context=UNSET): return process('run_udf', data=data, udf=udf, runtime=runtime, version=version, context=context) -def run_udf_externally(data, url, context=UNSET): +def run_udf_externally(data, url, context=UNSET) -> ProcessBuilder: """ Run an externally hosted UDF container @@ -3346,7 +3346,7 @@ def run_udf_externally(data, url, context=UNSET): return process('run_udf_externally', data=data, url=url, context=context) -def save_result(data, format, options=UNSET): +def save_result(data, format, options=UNSET) -> ProcessBuilder: """ Save processed data to storage @@ -3364,7 +3364,7 @@ def save_result(data, format, options=UNSET): return process('save_result', data=data, format=format, options=options) -def sd(data, ignore_nodata=UNSET): +def sd(data, ignore_nodata=UNSET) -> ProcessBuilder: """ Standard deviation @@ -3377,7 +3377,7 @@ def sd(data, ignore_nodata=UNSET): return process('sd', data=data, ignore_nodata=ignore_nodata) -def sgn(x): +def sgn(x) -> ProcessBuilder: """ Signum @@ -3388,7 +3388,7 @@ def sgn(x): return process('sgn', x=x) -def sin(x): +def sin(x) -> ProcessBuilder: """ Sine @@ -3399,7 +3399,7 @@ def sin(x): return process('sin', x=x) -def sinh(x): +def sinh(x) -> ProcessBuilder: """ Hyperbolic sine @@ -3410,7 +3410,7 @@ def sinh(x): return process('sinh', x=x) -def sort(data, asc=UNSET, nodata=UNSET): +def sort(data, asc=UNSET, nodata=UNSET) -> ProcessBuilder: """ Sort data @@ -3425,7 +3425,7 @@ def sort(data, asc=UNSET, nodata=UNSET): return process('sort', data=data, asc=asc, nodata=nodata) -def sqrt(x): +def sqrt(x) -> ProcessBuilder: """ Square root @@ -3436,7 +3436,7 @@ def sqrt(x): return process('sqrt', x=x) -def subtract(x, y): +def subtract(x, y) -> ProcessBuilder: """ Subtraction of two numbers @@ -3448,7 +3448,7 @@ 
def subtract(x, y): return process('subtract', x=x, y=y) -def sum(data, ignore_nodata=UNSET): +def sum(data, ignore_nodata=UNSET) -> ProcessBuilder: """ Compute the sum by adding up numbers @@ -3461,7 +3461,7 @@ def sum(data, ignore_nodata=UNSET): return process('sum', data=data, ignore_nodata=ignore_nodata) -def tan(x): +def tan(x) -> ProcessBuilder: """ Tangent @@ -3472,7 +3472,7 @@ def tan(x): return process('tan', x=x) -def tanh(x): +def tanh(x) -> ProcessBuilder: """ Hyperbolic tangent @@ -3483,7 +3483,7 @@ def tanh(x): return process('tanh', x=x) -def text_begins(data, pattern, case_sensitive=UNSET): +def text_begins(data, pattern, case_sensitive=UNSET) -> ProcessBuilder: """ Text begins with another text @@ -3496,7 +3496,7 @@ def text_begins(data, pattern, case_sensitive=UNSET): return process('text_begins', data=data, pattern=pattern, case_sensitive=case_sensitive) -def text_contains(data, pattern, case_sensitive=UNSET): +def text_contains(data, pattern, case_sensitive=UNSET) -> ProcessBuilder: """ Text contains another text @@ -3509,7 +3509,7 @@ def text_contains(data, pattern, case_sensitive=UNSET): return process('text_contains', data=data, pattern=pattern, case_sensitive=case_sensitive) -def text_ends(data, pattern, case_sensitive=UNSET): +def text_ends(data, pattern, case_sensitive=UNSET) -> ProcessBuilder: """ Text ends with another text @@ -3522,7 +3522,7 @@ def text_ends(data, pattern, case_sensitive=UNSET): return process('text_ends', data=data, pattern=pattern, case_sensitive=case_sensitive) -def text_merge(data, separator=UNSET): +def text_merge(data, separator=UNSET) -> ProcessBuilder: """ Concatenate elements to a string @@ -3536,7 +3536,7 @@ def text_merge(data, separator=UNSET): return process('text_merge', data=data, separator=separator) -def trim_cube(data): +def trim_cube(data) -> ProcessBuilder: """ Remove dimension labels with no-data values @@ -3548,7 +3548,7 @@ def trim_cube(data): return process('trim_cube', data=data) -def 
variance(data, ignore_nodata=UNSET): +def variance(data, ignore_nodata=UNSET) -> ProcessBuilder: """ Variance @@ -3561,7 +3561,7 @@ def variance(data, ignore_nodata=UNSET): return process('variance', data=data, ignore_nodata=ignore_nodata) -def xor(x, y): +def xor(x, y) -> ProcessBuilder: """ Logical XOR (exclusive or) diff --git a/tests/processes/test_generator.py b/tests/processes/test_generator.py index 92e905e81..f04f55512 100644 --- a/tests/processes/test_generator.py +++ b/tests/processes/test_generator.py @@ -102,6 +102,29 @@ def foo(x, y=UNSET): return process('foo', x=x, y=y)''') +def test_render_return_type_hint(): + process = Process.from_dict({ + "id": "incr", + "description": "Increment a value", + "summary": "Increment a value", + "parameters": [{"name": "x", "description": "value", "schema": {"type": "integer"}}], + "returns": {"description": "incremented value", "schema": {"type": "integer"}} + }) + + renderer = PythonRenderer(return_type_hint="FooBar") + src = renderer.render_process(process) + assert src == dedent('''\ + def incr(x) -> FooBar: + """ + Increment a value + + :param x: value + + :return: incremented value + """ + return process('incr', x=x)''') + + def test_render_oo_no_params(): process = Process.from_dict({ "id": "pi", @@ -112,12 +135,13 @@ def test_render_oo_no_params(): }) renderer = PythonRenderer(oo_mode=True) - src = renderer.render_process(process) + src = "class Consts:\n" + renderer.render_process(process) assert src == dedent('''\ - def pi(self): - """ - Pi - - :return: value of pi - """ - return process('pi', )''') + class Consts: + def pi(self): + """ + Pi + + :return: value of pi + """ + return process('pi', )''') From 49730bad3677db32ddc9f0f67e3b2a1b92beb52e Mon Sep 17 00:00:00 2001 From: Stefaan Lippens Date: Fri, 11 Sep 2020 11:59:38 +0200 Subject: [PATCH 4/9] EP-3555 improve test coverage of callback process building --- openeo/processes/generator.py | 18 +++- openeo/processes/processes.py | 16 ++- 
openeo/rest/datacube.py | 6 +- tests/data/1.0.0/apply_chain.json | 49 +++++++++ tests/data/1.0.0/apply_math.json | 60 ++++++++++++ tests/rest/datacube/test_processbuilder.py | 109 ++++++++++++++++----- 6 files changed, 227 insertions(+), 31 deletions(-) create mode 100644 tests/data/1.0.0/apply_chain.json create mode 100644 tests/data/1.0.0/apply_math.json diff --git a/openeo/processes/generator.py b/openeo/processes/generator.py index 2d9cc7fae..e9faa4dcb 100644 --- a/openeo/processes/generator.py +++ b/openeo/processes/generator.py @@ -96,10 +96,24 @@ def generate_process_py(processes_dir: Union[Path, str], output=sys.stdout): class ProcessBuilder(ProcessBuilderBase): - def __add__(self, other): + def __add__(self, other) -> 'ProcessBuilder': return self.add(other) - + def __sub__(self, other) -> 'ProcessBuilder': + return self.subtract(other) + + def __mul__(self, other) -> 'ProcessBuilder': + return self.multiply(other) + + def __truediv__(self, other) -> 'ProcessBuilder': + return self.divide(other) + + def __neg__(self) -> 'ProcessBuilder': + return self.multiply(-1) + + def __pow__(self, other) -> 'ProcessBuilder': + return self.power(other) + """) fun_src = textwrap.dedent(""" # Shortcut diff --git a/openeo/processes/processes.py b/openeo/processes/processes.py index f96c7e67f..32db9740c 100644 --- a/openeo/processes/processes.py +++ b/openeo/processes/processes.py @@ -7,9 +7,23 @@ class ProcessBuilder(ProcessBuilderBase): - def __add__(self, other): + def __add__(self, other) -> 'ProcessBuilder': return self.add(other) + def __sub__(self, other) -> 'ProcessBuilder': + return self.subtract(other) + + def __mul__(self, other) -> 'ProcessBuilder': + return self.multiply(other) + + def __truediv__(self, other) -> 'ProcessBuilder': + return self.divide(other) + + def __neg__(self) -> 'ProcessBuilder': + return self.multiply(-1) + + def __pow__(self, other) -> 'ProcessBuilder': + return self.power(other) def absolute(self) -> 'ProcessBuilder': """ diff --git 
a/openeo/rest/datacube.py b/openeo/rest/datacube.py index d13a17f4f..4c142f7fb 100644 --- a/openeo/rest/datacube.py +++ b/openeo/rest/datacube.py @@ -361,9 +361,6 @@ def __lt__(self, other: Union['DataCube', int, float]) -> 'DataCube': """ return self._operator_binary("lt", other) - def __truediv__(self, other) -> 'DataCube': - return self.divide(other) - def __add__(self, other) -> 'DataCube': return self.add(other) @@ -385,6 +382,9 @@ def __mul__(self, other) -> 'DataCube': def __rmul__(self, other) -> 'DataCube': return self.multiply(other, reverse=True) + def __truediv__(self, other) -> 'DataCube': + return self.divide(other) + def __rpow__(self, other) -> 'DataCube': return self.power(other,reverse=True) diff --git a/tests/data/1.0.0/apply_chain.json b/tests/data/1.0.0/apply_chain.json new file mode 100644 index 000000000..07df15781 --- /dev/null +++ b/tests/data/1.0.0/apply_chain.json @@ -0,0 +1,49 @@ +{ + "loadcollection1": { + "process_id": "load_collection", + "arguments": { + "spatial_extent": null, + "id": "S2", + "temporal_extent": null + } + }, + "apply1": { + "process_id": "apply", + "arguments": { + "data": { + "from_node": "loadcollection1" + }, + "process": { + "process_graph": { + "absolute1": { + "process_id": "absolute", + "arguments": { + "x": { + "from_parameter": "x" + } + } + }, + "cos1": { + "process_id": "cos", + "arguments": { + "x": { + "from_node": "absolute1" + } + } + }, + "add1": { + "process_id": "add", + "arguments": { + "x": { + "from_node": "cos1" + }, + "y": 1.23 + }, + "result": true + } + } + } + }, + "result": true + } +} \ No newline at end of file diff --git a/tests/data/1.0.0/apply_math.json b/tests/data/1.0.0/apply_math.json new file mode 100644 index 000000000..b7bfbcc4f --- /dev/null +++ b/tests/data/1.0.0/apply_math.json @@ -0,0 +1,60 @@ +{ + "loadcollection1": { + "process_id": "load_collection", + "arguments": { + "spatial_extent": null, + "id": "S2", + "temporal_extent": null + } + }, + "apply1": { + "process_id": 
"apply", + "arguments": { + "data": { + "from_node": "loadcollection1" + }, + "process": { + "process_graph": { + "add1": { + "process_id": "add", + "arguments": { + "x": { + "from_parameter": "x" + }, + "y": 1 + } + }, + "subtract1": { + "process_id": "subtract", + "arguments": { + "x": { + "from_node": "add1" + }, + "y": 2 + } + }, + "multiply1": { + "process_id": "multiply", + "arguments": { + "x": { + "from_node": "subtract1" + }, + "y": 3 + } + }, + "divide1": { + "process_id": "divide", + "arguments": { + "x": { + "from_node": "multiply1" + }, + "y": 4 + }, + "result": true + } + } + } + }, + "result": true + } +} \ No newline at end of file diff --git a/tests/rest/datacube/test_processbuilder.py b/tests/rest/datacube/test_processbuilder.py index b71a81df3..c71bcf011 100644 --- a/tests/rest/datacube/test_processbuilder.py +++ b/tests/rest/datacube/test_processbuilder.py @@ -3,32 +3,106 @@ from ... import load_json_resource -def test_apply_absolute_callback_lambda_method(con100): +def test_apply_callback_absolute_lambda_method(con100): im = con100.load_collection("S2") result = im.apply(lambda data: data.absolute()) - expected_graph = load_json_resource('data/1.0.0/apply_absolute.json') - assert result.graph == expected_graph + assert result.graph == load_json_resource('data/1.0.0/apply_absolute.json') -def test_apply_absolute_callback_function(con100): +def test_apply_callback_absolute_function(con100): im = con100.load_collection("S2") from openeo.processes.processes import absolute result = im.apply(absolute) - expected_graph = load_json_resource('data/1.0.0/apply_absolute.json') - assert result.graph == expected_graph + assert result.graph == load_json_resource('data/1.0.0/apply_absolute.json') -def test_apply_absolute_callback_function_custom(con100): +def test_apply_callback_absolute_custom_function(con100): def abs(x: ProcessBuilder) -> ProcessBuilder: return x.absolute() im = con100.load_collection("S2") result = im.apply(abs) - expected_graph = 
load_json_resource('data/1.0.0/apply_absolute.json') - assert result.graph == expected_graph + assert result.graph == load_json_resource('data/1.0.0/apply_absolute.json') -def check_apply_neighbors(neighbors): +def test_apply_callback_chain_lambda_method(con100): + im = con100.load_collection("S2") + result = im.apply(lambda data: data.absolute().cos().add(y=1.23)) + assert result.graph == load_json_resource('data/1.0.0/apply_chain.json') + + +def test_apply_callback_chain_lambda_functions(con100): + im = con100.load_collection("S2") + from openeo.processes.processes import absolute, cos, add + result = im.apply(lambda data: add(cos(absolute(data)), 1.23)) + assert result.graph == load_json_resource('data/1.0.0/apply_chain.json') + + +def test_apply_callback_chain_lambda_mixed_and_operator(con100): + im = con100.load_collection("S2") + from openeo.processes.processes import cos + result = im.apply(lambda data: cos(data.absolute()) + 1.23) + assert result.graph == load_json_resource('data/1.0.0/apply_chain.json') + + +def test_apply_callback_chain_custom_function_methods(con100): + def transform(x: ProcessBuilder) -> ProcessBuilder: + return x.absolute().cos().add(y=1.23) + + im = con100.load_collection("S2") + result = im.apply(transform) + assert result.graph == load_json_resource('data/1.0.0/apply_chain.json') + + +def test_apply_callback_chain_custom_function_functions(con100): + from openeo.processes.processes import absolute, cos, add + + def transform(x: ProcessBuilder) -> ProcessBuilder: + return add(cos(absolute(x)), y=1.23) + + im = con100.load_collection("S2") + result = im.apply(transform) + assert result.graph == load_json_resource('data/1.0.0/apply_chain.json') + + +def test_apply_callback_chain_custom_function_mixed_and_operator(con100): + from openeo.processes.processes import cos + + def transform(x: ProcessBuilder) -> ProcessBuilder: + return cos(x.absolute()) + 1.23 + + im = con100.load_collection("S2") + result = im.apply(transform) + assert 
result.graph == load_json_resource('data/1.0.0/apply_chain.json') + + +def test_apply_callback_math_lambda(con100): + im = con100.load_collection("S2") + result = im.apply(lambda data: (((data + 1) - 2) * 3) / 4) + assert result.graph == load_json_resource('data/1.0.0/apply_math.json') + + +def test_apply_callback_math_custom_function(con100): + def do_math(data: ProcessBuilder) -> ProcessBuilder: + return (((data + 1) - 2) * 3) / 4 + + im = con100.load_collection("S2") + result = im.apply(do_math) + assert result.graph == load_json_resource('data/1.0.0/apply_math.json') + + +def test_apply_neighborhood_udf_callback(con100): + + def callback(data: ProcessBuilder): + return data.run_udf(udf='myfancycode', runtime='Python') + + collection = con100.load_collection("S2") + neighbors = collection.apply_neighborhood(process=callback, size=[ + {'dimension': 'x', 'value': 128, 'unit': 'px'}, + {'dimension': 'y', 'value': 128, 'unit': 'px'} + ], overlap=[ + {'dimension': 't', 'value': 'P10d'}, + ]) actual_graph = neighbors.graph['applyneighborhood1'] assert actual_graph == { 'process_id': 'apply_neighborhood', @@ -51,21 +125,6 @@ def check_apply_neighbors(neighbors): } -def test_apply_neighborhood_udf_callback(con100): - collection = con100.load_collection("S2") - - def callback(data: ProcessBuilder): - return data.run_udf(udf='myfancycode', runtime='Python') - - neighbors = collection.apply_neighborhood(process=callback, size=[ - {'dimension': 'x', 'value': 128, 'unit': 'px'}, - {'dimension': 'y', 'value': 128, 'unit': 'px'} - ], overlap=[ - {'dimension': 't', 'value': 'P10d'}, - ]) - check_apply_neighbors(neighbors) - - def test_apply_neighborhood_complex_callback(con100): collection = con100.load_collection("S2") From 5e8f6b324c4bd6b67c0c804c9a24adbdd99864ef Mon Sep 17 00:00:00 2001 From: Stefaan Lippens Date: Fri, 11 Sep 2020 17:48:57 +0200 Subject: [PATCH 5/9] EP-3555 unify callback handling in DataCube unify callback handling: - allow specifying as string, PGNode or 
Callable - de-boilerplate it in `apply_dimension`, `reduce_dimension`, `apply_neighborhood`, `apply`, `merge_cubes`, `aggregate_spatial`, `load_collection` --- openeo/internal/graph_building.py | 8 +- openeo/processes/builder.py | 2 + openeo/rest/datacube.py | 186 ++++++++++-------- .../data/1.0.0/apply_dimension_bandmath.json | 54 +++++ tests/data/1.0.0/apply_dimension_max.json | 33 ++++ tests/data/1.0.0/apply_neighborhood_trim.json | 44 +++++ .../data/1.0.0/reduce_dimension_bandmath.json | 54 +++++ tests/data/1.0.0/reduce_dimension_max.json | 33 ++++ tests/rest/datacube/test_datacube100.py | 16 ++ tests/rest/datacube/test_processbuilder.py | 181 ++++++++--------- 10 files changed, 433 insertions(+), 178 deletions(-) create mode 100644 tests/data/1.0.0/apply_dimension_bandmath.json create mode 100644 tests/data/1.0.0/apply_dimension_max.json create mode 100644 tests/data/1.0.0/apply_neighborhood_trim.json create mode 100644 tests/data/1.0.0/reduce_dimension_bandmath.json create mode 100644 tests/data/1.0.0/reduce_dimension_max.json diff --git a/openeo/internal/graph_building.py b/openeo/internal/graph_building.py index 4df500686..07585e97c 100644 --- a/openeo/internal/graph_building.py +++ b/openeo/internal/graph_building.py @@ -12,7 +12,7 @@ class PGNode: """ - Generic node in a process graph. + Wrapper for process node in a process graph (has process_id and arguments). While this is a simple, thin container, it allows a bit more abstraction, basic encapsulation, type hinting and code intelligence in your IDE than something generic like a dict. 
@@ -71,7 +71,7 @@ def flatten(self): return GraphFlattener().flatten(node=self) @staticmethod - def to_process_graph_argument(value: Union['PGNode', str, dict]): + def to_process_graph_argument(value: Union['PGNode', str, dict]) -> dict: """ Normalize given argument properly to a "process_graph" argument to be used as reducer/subprocess for processes like @@ -79,6 +79,7 @@ def to_process_graph_argument(value: Union['PGNode', str, dict]): """ if isinstance(value, str): # assume string with predefined reduce/apply process ("mean", "sum", ...) + # TODO: is this case still used? It's invalid anyway for 1.0 openEO spec I think? return value elif isinstance(value, PGNode): return {"process_graph": value} @@ -109,12 +110,13 @@ def __init__(self, code:str,runtime:str,data=None,version:str = None,context:Dic super().__init__(process_id='run_udf', arguments=arguments) + class ReduceNode(PGNode): """ A process graph node for "reduce" processes (has a reducer sub-process-graph) """ - def __init__(self, data: PGNode, reducer: Union[PGNode, str], dimension: str, process_id="reduce_dimension", + def __init__(self, data: PGNode, reducer: Union[PGNode, str, dict], dimension: str, process_id="reduce_dimension", band_math_mode: bool = False): assert process_id in ("reduce_dimension", "reduce_dimension_binary") arguments = { diff --git a/openeo/processes/builder.py b/openeo/processes/builder.py index ec83b6ca6..1ef81daa3 100644 --- a/openeo/processes/builder.py +++ b/openeo/processes/builder.py @@ -11,6 +11,8 @@ class ProcessBuilderBase: by calling functions. """ + # TODO: can this implementation be merged with PGNode directly? 
+ def __init__(self, pgnode: Union[PGNode, dict]): self.pgnode = pgnode diff --git a/openeo/rest/datacube.py b/openeo/rest/datacube.py index 4c142f7fb..023fceee7 100644 --- a/openeo/rest/datacube.py +++ b/openeo/rest/datacube.py @@ -67,10 +67,6 @@ def graph(self) -> dict: """Get the process graph in flattened dict representation""" return self.flatten() - @property - def processgraph_node(self) -> PGNode: - return self._pg - def flatten(self) -> dict: """Get the process graph in flattened dict representation""" return self._pg.flatten() @@ -124,7 +120,7 @@ def load_collection( temporal_extent: Union[List[Union[str, datetime.datetime, datetime.date]], None] = None, bands: Union[List[str], None] = None, fetch_metadata=True, - properties: Dict[str, PGNode] = None + properties: Dict[str, Union[str, PGNode, typing.Callable]] = None ): """ Create a new Raster Data cube. @@ -153,7 +149,7 @@ def load_collection( arguments['bands'] = bands if properties: arguments['properties'] = { - prop: PGNode.to_process_graph_argument(pred) + prop: cls._get_callback(pred, parameter_mapping={"x": "value"}) for prop, pred in properties.items() } pg = PGNode( @@ -498,8 +494,56 @@ def zonal_statistics(self, regions, func, scale=1000, interval="day") -> 'DataCu return self.process(process_id, args) + @staticmethod + def _get_callback(process: Union[str, PGNode, typing.Callable], parameter_mapping: dict) -> dict: + """ + Build a "callback" process: a user defined process that is used by another process (such + as `apply`, `apply_dimension`, `reduce`, ....) 
+ + :param process: process id string, PGNode or callable that uses the ProcessBuilder mechanism to build a process + :parameter parameter_mapping: mapping of child (callback) parameters names to parent process parameter names + :return: + """ + + def get_args(parameter_mapping: dict, factory: typing.Callable) -> dict: + """Helper to build a dict of process arguments (in desired format).""" + args = {} + for child_param, parent_param in parameter_mapping.items(): + if isinstance(parent_param, str): + args[child_param] = factory(parent_param) + elif isinstance(parent_param, list): + args[child_param] = [factory(p) for p in parent_param] + else: + raise ValueError(parent_param) + return args + + # TODO: autodetect the parameters defined by process? + if isinstance(process, str): + # Assume given reducer is a simple predefined reduce process_id + pg = PGNode( + process_id=process, + arguments=get_args(parameter_mapping, factory=lambda p: {"from_parameter": p}) + ) + elif isinstance(process, PGNode): + # Assume this is already a valid callback process + # TODO: check used parameters against expected ones? + pg=process + elif isinstance(process, typing.Callable): + args = get_args(parameter_mapping, factory=lambda p: ProcessBuilder.from_parameter(p)) + # Only call with kwargs to avoid being picky towards user about argument names when there is no confusion. + if len(args) < 2: + pg = process(*args.values()).pgnode + else: + pg = process(**args).pgnode + else: + raise ValueError(process) + + return PGNode.to_process_graph_argument(pg) + def apply_dimension( - self, code: str=None, runtime=None,process:typing.Callable=None, version="latest", dimension='t', target_dimension=None + self, code: str = None, runtime=None, + process: [str, PGNode, typing.Callable] = None, + version="latest", dimension='t', target_dimension=None ) -> 'DataCube': """ Applies a process to all pixel values along a dimension of a raster data cube. 
For example, @@ -533,31 +577,30 @@ def apply_dimension( :raises: DimensionNotAvailable """ if runtime: + # TODO EP-3555: unify better with UDF(PGNode) class and avoid doing same UDF code-runtime-version argument stuff in each method callback_process_node = self._create_run_udf(code, runtime, version) - elif isinstance(code,str): - callback_process_node = PGNode( - process_id=code, - arguments={"data": {"from_parameter": "data"}}, - ) - elif isinstance(process, typing.Callable): - builder = process(ProcessBuilder.from_parameter("data")) - callback_process_node = builder.pgnode + process = PGNode.to_process_graph_argument(callback_process_node) + elif code or process: + # TODO EP-3555 unify `code` and `process` + process = self._get_callback(code or process, parameter_mapping={"data": "data"}) else: - raise OpenEoClientException("No code or process given") + raise OpenEoClientException("No UDF code or process given") arguments = { - "data": self._pg, - "process": PGNode.to_process_graph_argument(callback_process_node), + "data": THIS, + "process": process, "dimension": self.metadata.assert_valid_dimension(dimension), # TODO #125 arguments: context } if target_dimension is not None: arguments["target_dimension"] = target_dimension - result_cube = self.process_with_node(PGNode(process_id="apply_dimension", arguments=arguments)) + result_cube = self.process(process_id="apply_dimension", arguments=arguments) return result_cube - def reduce_dimension(self, dimension: str, reducer: Union[typing.Callable, str], - process_id="reduce_dimension", band_math_mode: bool = False) -> 'DataCube': + def reduce_dimension( + self, dimension: str, reducer: Union[str, PGNode, typing.Callable], + process_id="reduce_dimension", band_math_mode: bool = False + ) -> 'DataCube': """ Add a reduce process with given reducer callback along given dimension @@ -566,11 +609,7 @@ def reduce_dimension(self, dimension: str, reducer: Union[typing.Callable, str], """ # TODO: check if dimension is valid 
according to metadata? #116 # TODO: #125 use/test case for `reduce_dimension_binary`? - if isinstance(reducer, str): - # Assume given reducer is a simple predefined reduce process_id - reducer = PGNode(process_id=reducer, arguments={"data": {"from_parameter": "data"}}) - elif isinstance(reducer, typing.Callable): - reducer = reducer(ProcessBuilder.from_parameter("data")).pgnode + reducer = self._get_callback(reducer, parameter_mapping={"data": "data"}) return self.process_with_node(ReduceNode( process_id=process_id, @@ -593,6 +632,7 @@ def reduce_bands_udf(self, code: str, runtime="Python", version="latest") -> 'Da """ Apply reduce (`reduce_dimension`) process with given UDF along band/spectral dimension. """ + # TODO EP-3555: unify better with UDF(PGNode) class and avoid doing same UDF code-runtime-version argument stuff in each method return self._reduce_bands(reducer=self._create_run_udf(code, runtime, version)) def add_dimension(self, name: str, label: str, type: str = None): @@ -603,7 +643,7 @@ def add_dimension(self, name: str, label: str, type: str = None): ) def _create_run_udf(self, code, runtime, version) -> PGNode: - # TODO: expose this publicly (or create dedicated PGNode subclass)? Also encapsulate/decouple UDF loading better? + # TODO EP-3555: unify better with UDF(PGNode) class return PGNode( process_id="run_udf", arguments={ @@ -619,6 +659,7 @@ def reduce_temporal_udf(self, code: str, runtime="Python", version="latest"): """ Apply reduce (`reduce_dimension`) process with given UDF along temporal dimension. 
""" + # TODO EP-3555: unify better with UDF(PGNode) class and avoid doing same UDF code-runtime-version argument stuff in each method return self._reduce_temporal(reducer=self._create_run_udf(code, runtime, version)) @deprecated("use `reduce_temporal_udf` instead") @@ -634,7 +675,10 @@ def reduce_tiles_over_time(self, code: str, runtime="Python", version="latest"): """ return self.reduce_temporal_udf(code=code, runtime=runtime, version=version) - def apply_neighborhood(self, size:List[Dict],overlap:List[Dict]=[],process:PGNode = None) -> 'DataCube': + def apply_neighborhood( + self, process: [str, PGNode, typing.Callable], + size: List[Dict], overlap: List[dict] = None + ) -> 'DataCube': """ Applies a focal process to a data cube. @@ -651,23 +695,17 @@ def apply_neighborhood(self, size:List[Dict],overlap:List[Dict]=[],process:PGNod :param process: a callback function that creates a process graph, see :ref:`callbackfunctions` :return: """ - args = { - "data": self._pg, - "process": {"process_graph": process}, - "size": size, - "overlap": overlap - } - result_cube = self.process_with_node(PGNode( + return self.process( process_id='apply_neighborhood', - arguments=args - )) - if isinstance(process, typing.Callable): - process_builder = process(ProcessBuilder.from_parameter("data")) - result_cube.processgraph_node.arguments['process'] = {'process_graph': process_builder.pgnode} - - return result_cube + arguments=dict_no_none( + data=THIS, + process=self._get_callback(process, parameter_mapping={"data": "data"}), + size=size, + overlap=overlap + ) + ) - def apply(self, process: Union[str, PGNode]=None, data_argument='x') -> 'DataCube': + def apply(self, process: Union[str, PGNode, typing.Callable] = None, data_argument="x") -> 'DataCube': """ Applies a unary process (a local operation) to each value of the specified or all dimensions in the data cube. 
@@ -675,25 +713,14 @@ def apply(self, process: Union[str, PGNode]=None, data_argument='x') -> 'DataCub :param dimensions: The names of the dimensions to apply the process on. Defaults to an empty array so that all dimensions are used. :return: A data cube with the newly computed values. The resolution, cardinality and the number of dimensions are the same as for the original data cube. """ - if isinstance(process, str): - # Simple single string process specification - process = PGNode( - process_id=process, - arguments={data_argument: {"from_parameter": "x"}} - ) - result_cube = self.process_with_node(PGNode( - process_id='apply', + return self.process( + process_id="apply", arguments={ - "data": self._pg, - "process": {"process_graph": process}, + "data": THIS, + "process": self._get_callback(process, parameter_mapping={data_argument: "x"}), # TODO #125 context } - )) - if isinstance(process, typing.Callable): - process_builder = process(ProcessBuilder.from_parameter("x")) - result_cube.processgraph_node.arguments['process'] = {'process_graph': process_builder.pgnode} - - return result_cube + ) def reduce_temporal_simple(self, process_id="max") -> 'DataCube': """Do temporal reduce with a simple given process as callback.""" @@ -905,32 +932,21 @@ def mask_polygon( ) ) - def merge(self, other: 'DataCube', overlap_resolver: Union[str, typing.Callable] = None) -> 'DataCube': + def merge_cubes( + self, other: 'DataCube', overlap_resolver: Union[str, PGNode, typing.Callable] = None + ) -> 'DataCube': arguments = { 'cube1': {'from_node': self._pg}, 'cube2': {'from_node': other._pg}, } if overlap_resolver: - if isinstance(overlap_resolver, str): - # Simple resolver (specified as process_id string) - overlap_resolver_node = PGNode( - process_id=overlap_resolver, - arguments={"data": [{"from_parameter": "x"}, {"from_parameter": "y"}]} - ) - elif isinstance(overlap_resolver,typing.Callable): - process_builder = overlap_resolver( - ProcessBuilder.from_parameter("x"), 
ProcessBuilder.from_parameter("y") - ) - overlap_resolver_node = process_builder.pgnode - elif isinstance(overlap_resolver, PGNode): - overlap_resolver_node = overlap_resolver - else: - raise ValueError("Unsupported overlap_resolver: %s" % str(overlap_resolver)) - - arguments["overlap_resolver"] = {"process_graph": overlap_resolver_node} + arguments["overlap_resolver"] = self._get_callback(overlap_resolver, parameter_mapping={"data": ["x", "y"]}) # TODO #125 context # TODO: set metadata of reduced cube? - return self.process_with_node(PGNode(process_id="merge_cubes", arguments=arguments)) + return self.process(process_id="merge_cubes", arguments=arguments) + + # Legacy alias + merge = merge_cubes def apply_kernel(self, kernel: Union[np.ndarray, List[List[float]]], factor=1.0, border = 0, replace_invalid=0) -> 'DataCube': """ @@ -1024,8 +1040,9 @@ def polygonal_standarddeviation_timeseries(self, polygon: Union[Polygon, MultiPo return self._polygonal_timeseries(polygon, "sd") - def _polygonal_timeseries(self, polygon: Union[Polygon, MultiPolygon, str], func: str) -> 'DataCube': - + def _polygonal_timeseries( + self, polygon: Union[Polygon, MultiPolygon, str], func: Union[str, PGNode, typing.Callable] + ) -> 'DataCube': if isinstance(polygon, str): # polygon is a path to vector file # TODO this is non-standard process: check capabilities? 
#104 #40 @@ -1039,18 +1056,15 @@ def _polygonal_timeseries(self, polygon: Union[Polygon, MultiPolygon, str], func } } - return self.process_with_node(PGNode( + return self.process( process_id="aggregate_spatial", arguments={ "data": self._pg, "geometries": geometries, - "reducer": {"process_graph": PGNode( - process_id=func, - arguments={"data": {"from_parameter": "data"}} - )}, + "reducer": self._get_callback(func, parameter_mapping={"data":"data"}) # TODO #125 target dimension, context } - )) + ) def save_result(self, format: str = "GTiff", options: dict = None): formats = set(self._connection.list_output_formats().keys()) diff --git a/tests/data/1.0.0/apply_dimension_bandmath.json b/tests/data/1.0.0/apply_dimension_bandmath.json new file mode 100644 index 000000000..af0144abc --- /dev/null +++ b/tests/data/1.0.0/apply_dimension_bandmath.json @@ -0,0 +1,54 @@ +{ + "loadcollection1": { + "process_id": "load_collection", + "arguments": { + "spatial_extent": null, + "id": "S2", + "temporal_extent": null + } + }, + "applydimension1": { + "process_id": "apply_dimension", + "arguments": { + "data": { + "from_node": "loadcollection1" + }, + "dimension": "bands", + "process": { + "process_graph": { + "arrayelement1": { + "process_id": "array_element", + "arguments": { + "data": { + "from_parameter": "data" + }, + "index": 1 + } + }, + "arrayelement2": { + "process_id": "array_element", + "arguments": { + "data": { + "from_parameter": "data" + }, + "index": 2 + } + }, + "add1": { + "process_id": "add", + "arguments": { + "x": { + "from_node": "arrayelement1" + }, + "y": { + "from_node": "arrayelement2" + } + }, + "result": true + } + } + } + }, + "result": true + } +} \ No newline at end of file diff --git a/tests/data/1.0.0/apply_dimension_max.json b/tests/data/1.0.0/apply_dimension_max.json new file mode 100644 index 000000000..e1a54020f --- /dev/null +++ b/tests/data/1.0.0/apply_dimension_max.json @@ -0,0 +1,33 @@ +{ + "loadcollection1": { + "process_id": 
"load_collection", + "arguments": { + "spatial_extent": null, + "id": "S2", + "temporal_extent": null + } + }, + "applydimension1": { + "process_id": "apply_dimension", + "arguments": { + "data": { + "from_node": "loadcollection1" + }, + "dimension": "bands", + "process": { + "process_graph": { + "max1": { + "process_id": "max", + "arguments": { + "data": { + "from_parameter": "data" + } + }, + "result": true + } + } + } + }, + "result": true + } +} \ No newline at end of file diff --git a/tests/data/1.0.0/apply_neighborhood_trim.json b/tests/data/1.0.0/apply_neighborhood_trim.json new file mode 100644 index 000000000..bdbd6630c --- /dev/null +++ b/tests/data/1.0.0/apply_neighborhood_trim.json @@ -0,0 +1,44 @@ +{ + "loadcollection1": { + "process_id": "load_collection", + "arguments": { + "spatial_extent": null, + "id": "S2", + "temporal_extent": null + } + }, + "applyneighborhood1": { + "process_id": "apply_neighborhood", + "arguments": { + "data": { + "from_node": "loadcollection1" + }, + "process": { + "process_graph": { + "trimcube1": { + "process_id": "trim_cube", + "arguments": { + "data": { + "from_parameter": "data" + } + }, + "result": true + } + } + }, + "size": [ + { + "dimension": "x", + "unit": "px", + "value": 128 + }, + { + "dimension": "y", + "unit": "px", + "value": 128 + } + ] + }, + "result": true + } +} \ No newline at end of file diff --git a/tests/data/1.0.0/reduce_dimension_bandmath.json b/tests/data/1.0.0/reduce_dimension_bandmath.json new file mode 100644 index 000000000..292874ea0 --- /dev/null +++ b/tests/data/1.0.0/reduce_dimension_bandmath.json @@ -0,0 +1,54 @@ +{ + "loadcollection1": { + "process_id": "load_collection", + "arguments": { + "spatial_extent": null, + "id": "S2", + "temporal_extent": null + } + }, + "reducedimension1": { + "process_id": "reduce_dimension", + "arguments": { + "data": { + "from_node": "loadcollection1" + }, + "dimension": "bands", + "reducer": { + "process_graph": { + "arrayelement1": { + "process_id": 
"array_element", + "arguments": { + "data": { + "from_parameter": "data" + }, + "index": 1 + } + }, + "arrayelement2": { + "process_id": "array_element", + "arguments": { + "data": { + "from_parameter": "data" + }, + "index": 2 + } + }, + "add1": { + "process_id": "add", + "arguments": { + "x": { + "from_node": "arrayelement1" + }, + "y": { + "from_node": "arrayelement2" + } + }, + "result": true + } + } + } + }, + "result": true + } +} \ No newline at end of file diff --git a/tests/data/1.0.0/reduce_dimension_max.json b/tests/data/1.0.0/reduce_dimension_max.json new file mode 100644 index 000000000..e4bbfaef6 --- /dev/null +++ b/tests/data/1.0.0/reduce_dimension_max.json @@ -0,0 +1,33 @@ +{ + "loadcollection1": { + "process_id": "load_collection", + "arguments": { + "spatial_extent": null, + "id": "S2", + "temporal_extent": null + } + }, + "reducedimension1": { + "process_id": "reduce_dimension", + "arguments": { + "data": { + "from_node": "loadcollection1" + }, + "dimension": "bands", + "reducer": { + "process_graph": { + "max1": { + "process_id": "max", + "arguments": { + "data": { + "from_parameter": "data" + } + }, + "result": true + } + } + } + }, + "result": true + } +} \ No newline at end of file diff --git a/tests/rest/datacube/test_datacube100.py b/tests/rest/datacube/test_datacube100.py index fa2826a8c..d175fc31d 100644 --- a/tests/rest/datacube/test_datacube100.py +++ b/tests/rest/datacube/test_datacube100.py @@ -279,6 +279,22 @@ def between(min, max) -> PGNode: assert im.graph == expected +def test_load_collection_properties_process_builder_function(con100): + from openeo.processes.processes import between, eq + im = con100.load_collection( + "S2", + spatial_extent={"west": 16.1, "east": 16.6, "north": 48.6, "south": 47.2}, + temporal_extent=["2018-01-01", "2019-01-01"], + properties={ + "eo:cloud_cover": lambda x: between(x=x, min=0, max=50), + "platform": lambda x: eq(x=x, y="Sentinel-2B", case_sensitive=False) + } + ) + + expected = 
load_json_resource('data/1.0.0/load_collection_properties.json') + assert im.graph == expected + + def test_apply_dimension_temporal_cumsum_with_target(con100): cumsum = con100.load_collection("S2").apply_dimension('cumsum', dimension="t", target_dimension="MyNewTime") actual_graph = cumsum.graph diff --git a/tests/rest/datacube/test_processbuilder.py b/tests/rest/datacube/test_processbuilder.py index c71bcf011..2ded8ace2 100644 --- a/tests/rest/datacube/test_processbuilder.py +++ b/tests/rest/datacube/test_processbuilder.py @@ -1,3 +1,4 @@ +from openeo.internal.graph_building import PGNode from openeo.processes.processes import ProcessBuilder from ... import load_json_resource @@ -91,8 +92,44 @@ def do_math(data: ProcessBuilder) -> ProcessBuilder: assert result.graph == load_json_resource('data/1.0.0/apply_math.json') -def test_apply_neighborhood_udf_callback(con100): +def test_apply_neighborhood_trim_str(con100): + im = con100.load_collection("S2") + result = im.apply_neighborhood( + process="trim_cube", + size=[{'dimension': 'x', 'value': 128, 'unit': 'px'}, {'dimension': 'y', 'value': 128, 'unit': 'px'}] + ) + assert result.graph == load_json_resource('data/1.0.0/apply_neighborhood_trim.json') + + +def test_apply_neighborhood_trim_pgnode(con100): + im = con100.load_collection("S2") + result = im.apply_neighborhood( + process=PGNode("trim_cube", data={"from_parameter": "data"}), + size=[{'dimension': 'x', 'value': 128, 'unit': 'px'}, {'dimension': 'y', 'value': 128, 'unit': 'px'}] + ) + assert result.graph == load_json_resource('data/1.0.0/apply_neighborhood_trim.json') + +def test_apply_neighborhood_trim_callable(con100): + from openeo.processes.processes import trim_cube + im = con100.load_collection("S2") + result = im.apply_neighborhood( + process=trim_cube, + size=[{'dimension': 'x', 'value': 128, 'unit': 'px'}, {'dimension': 'y', 'value': 128, 'unit': 'px'}] + ) + assert result.graph == load_json_resource('data/1.0.0/apply_neighborhood_trim.json') + + +def 
test_apply_neighborhood_trim_lambda(con100): + im = con100.load_collection("S2") + result = im.apply_neighborhood( + process=lambda data: data.trim_cube(), + size=[{'dimension': 'x', 'value': 128, 'unit': 'px'}, {'dimension': 'y', 'value': 128, 'unit': 'px'}] + ) + assert result.graph == load_json_resource('data/1.0.0/apply_neighborhood_trim.json') + + +def test_apply_neighborhood_udf_callback(con100): def callback(data: ProcessBuilder): return data.run_udf(udf='myfancycode', runtime='Python') @@ -157,106 +194,72 @@ def test_apply_neighborhood_complex_callback(con100): } -def test_apply_dimension_bandmath(con100): - from openeo.processes.processes import array_element +def test_apply_dimension_max_str(con100): + im = con100.load_collection("S2") + res = im.apply_dimension(process="max", dimension="bands") + assert res.graph == load_json_resource('data/1.0.0/apply_dimension_max.json') - collection = con100.load_collection("S2") - bandsum = collection.apply_dimension( + +def test_apply_dimension_max_pgnode(con100): + im = con100.load_collection("S2") + res = im.apply_dimension(process=PGNode("max", data={"from_parameter": "data"}), dimension="bands") + assert res.graph == load_json_resource('data/1.0.0/apply_dimension_max.json') + + +def test_apply_dimension_max_callable(con100): + im = con100.load_collection("S2") + from openeo.processes.processes import max + res = im.apply_dimension(process=max, dimension="bands") + assert res.graph == load_json_resource('data/1.0.0/apply_dimension_max.json') + + +def test_apply_dimension_max_lambda(con100): + im = con100.load_collection("S2") + res = im.apply_dimension(process=lambda data: data.max(), dimension="bands") + assert res.graph == load_json_resource('data/1.0.0/apply_dimension_max.json') + + +def test_apply_dimension_bandmath_lambda(con100): + from openeo.processes.processes import array_element + im = con100.load_collection("S2") + res = im.apply_dimension( process=lambda d: array_element(d, index=1) + 
array_element(d, index=2), dimension="bands" ) + assert res.graph == load_json_resource('data/1.0.0/apply_dimension_bandmath.json') - actual_graph = bandsum.graph['applydimension1'] - assert actual_graph == { - 'process_id': 'apply_dimension', - 'arguments': { - 'data': {'from_node': 'loadcollection1'}, - 'dimension': 'bands', - 'process': {'process_graph': { - 'arrayelement1': { - 'process_id': 'array_element', - 'arguments': {'data': {'from_parameter': 'data'}, 'index': 1}, - }, - 'arrayelement2': { - 'process_id': 'array_element', - 'arguments': {'data': {'from_parameter': 'data'}, 'index': 2}, - }, - 'add1': { - 'process_id': 'add', - 'arguments': {'x': {'from_node': 'arrayelement1'}, 'y': {'from_node': 'arrayelement2'}}, - 'result': True - }, - }} - }, - 'result': True - } +def test_reduce_dimension_max_str(con100): + im = con100.load_collection("S2") + res = im.reduce_dimension(reducer="max", dimension="bands") + assert res.graph == load_json_resource('data/1.0.0/reduce_dimension_max.json') -def test_reduce_dimension(con100): - collection = con100.load_collection("S2") - from openeo.processes.processes import array_element +def test_reduce_dimension_max_pgnode(con100): + im = con100.load_collection("S2") + res = im.reduce_dimension(reducer=PGNode("max", data={"from_parameter": "data"}), dimension="bands") + assert res.graph == load_json_resource('data/1.0.0/reduce_dimension_max.json') - bandsum = collection.reduce_dimension( - dimension='bands', - reducer=lambda data: array_element(data, index=1) + array_element(data, index=2) - ) - actual_graph = bandsum.graph['reducedimension1'] - assert actual_graph == { - 'arguments': { - 'data': {'from_node': 'loadcollection1'}, - 'dimension': 'bands', - 'reducer': {'process_graph': { - 'arrayelement1': { - 'process_id': 'array_element', - 'arguments': {'data': {'from_parameter': 'data'}, 'index': 1}, - }, - 'arrayelement2': { - 'process_id': 'array_element', - 'arguments': {'data': {'from_parameter': 'data'}, 'index': 
2}, - }, - 'add1': { - 'arguments': {'x': {'from_node': 'arrayelement1'}, 'y': {'from_node': 'arrayelement2'}}, - 'process_id': 'add', - 'result': True - }, - }}, - }, - 'process_id': 'reduce_dimension', - 'result': True} +def test_reduce_dimension_max_callable(con100): + im = con100.load_collection("S2") + from openeo.processes.processes import max + res = im.reduce_dimension(reducer=max, dimension="bands") + assert res.graph == load_json_resource('data/1.0.0/reduce_dimension_max.json') -def test_apply_dimension(con100): - collection = con100.load_collection("S2") +def test_reduce_dimension_max_lambda(con100): + im = con100.load_collection("S2") + res = im.reduce_dimension(reducer=lambda data: data.max(), dimension="bands") + assert res.graph == load_json_resource('data/1.0.0/reduce_dimension_max.json') - from openeo.processes.processes import array_element - bandsum = collection.apply_dimension( - dimension='bands', - process=lambda data: array_element(data, index=1) + array_element(data, index=2) +def test_reduce_dimension_bandmath_lambda(con100): + from openeo.processes.processes import array_element + collection = con100.load_collection("S2") + im = con100.load_collection("S2") + res = collection.reduce_dimension( + reducer=lambda data: array_element(data, index=1) + array_element(data, index=2), + dimension='bands' ) - - actual_graph = bandsum.graph['applydimension1'] - assert actual_graph == { - 'process_id': 'apply_dimension', - 'arguments': { - 'data': {'from_node': 'loadcollection1'}, - 'dimension': 'bands', - 'process': {'process_graph': { - 'arrayelement1': { - 'process_id': 'array_element', - 'arguments': {'data': {'from_parameter': 'data'}, 'index': 1}, - }, - 'arrayelement2': { - 'process_id': 'array_element', - 'arguments': {'data': {'from_parameter': 'data'}, 'index': 2}, - }, - 'add1': { - 'process_id': 'add', - 'arguments': {'x': {'from_node': 'arrayelement1'}, 'y': {'from_node': 'arrayelement2'}}, - 'result': True - }, - }} - }, - 'result': 
True} + assert res.graph == load_json_resource('data/1.0.0/reduce_dimension_bandmath.json') From 357705e5081ca8452b98e373503ac6908d45aefb Mon Sep 17 00:00:00 2001 From: Stefaan Lippens Date: Sat, 12 Sep 2020 00:32:33 +0200 Subject: [PATCH 6/9] EP-3555 fix callback handling of merge_cubes --- openeo/processes/builder.py | 6 +- openeo/rest/datacube.py | 77 +++++++++++---------- tests/data/1.0.0/merge_cubes_add.json | 46 +++++++++++++ tests/data/1.0.0/merge_cubes_multiple.json | 8 +-- tests/data/1.0.0/merge_cubes_or.json | 4 +- tests/rest/datacube/test_processbuilder.py | 79 +++++++++++++++++++++- 6 files changed, 171 insertions(+), 49 deletions(-) create mode 100644 tests/data/1.0.0/merge_cubes_add.json diff --git a/openeo/processes/builder.py b/openeo/processes/builder.py index 1ef81daa3..81f8e037c 100644 --- a/openeo/processes/builder.py +++ b/openeo/processes/builder.py @@ -13,13 +13,9 @@ class ProcessBuilderBase: # TODO: can this implementation be merged with PGNode directly? - def __init__(self, pgnode: Union[PGNode, dict]): + def __init__(self, pgnode: Union[PGNode, dict, list]): self.pgnode = pgnode - @classmethod - def from_parameter(cls, parameter: str): - return cls({"from_parameter": parameter}) - @classmethod def process(cls, process_id: str, arguments: dict = None, **kwargs): """ diff --git a/openeo/rest/datacube.py b/openeo/rest/datacube.py index 023fceee7..77b40600a 100644 --- a/openeo/rest/datacube.py +++ b/openeo/rest/datacube.py @@ -8,6 +8,7 @@ """ import datetime +import inspect import logging import pathlib import typing @@ -28,6 +29,7 @@ from openeo.rest.udp import RESTUserDefinedProcess from openeo.util import get_temporal_extent, dict_no_none from openeo.vectorcube import VectorCube +import openeo.processes.processes from openeo.metadata import Band import numpy from builtins import staticmethod @@ -149,7 +151,7 @@ def load_collection( arguments['bands'] = bands if properties: arguments['properties'] = { - prop: cls._get_callback(pred, 
parameter_mapping={"x": "value"}) + prop: cls._get_callback(pred, parent_parameters=["value"]) for prop, pred in properties.items() } pg = PGNode( @@ -459,8 +461,8 @@ def _merge_operator_binary_cubes(self, operator: str, other: 'DataCube', left_ar return self.merge(other, overlap_resolver=PGNode( process_id=operator, arguments={ - left_arg_name: {"from_parameter": "cube1"}, - right_arg_name: {"from_parameter": "cube2"}, + left_arg_name: {"from_parameter": "x"}, + right_arg_name: {"from_parameter": "y"}, } )) @@ -495,7 +497,7 @@ def zonal_statistics(self, regions, func, scale=1000, interval="day") -> 'DataCu return self.process(process_id, args) @staticmethod - def _get_callback(process: Union[str, PGNode, typing.Callable], parameter_mapping: dict) -> dict: + def _get_callback(process: Union[str, PGNode, typing.Callable], parent_parameters: List[str]) -> dict: """ Build a "callback" process: a user defined process that is used by another process (such as `apply`, `apply_dimension`, `reduce`, ....) @@ -505,36 +507,39 @@ def _get_callback(process: Union[str, PGNode, typing.Callable], parameter_mappin :return: """ - def get_args(parameter_mapping: dict, factory: typing.Callable) -> dict: - """Helper to build a dict of process arguments (in desired format).""" - args = {} - for child_param, parent_param in parameter_mapping.items(): - if isinstance(parent_param, str): - args[child_param] = factory(parent_param) - elif isinstance(parent_param, list): - args[child_param] = [factory(p) for p in parent_param] - else: - raise ValueError(parent_param) - return args + def get_parameter_names(process: typing.Callable) -> List[str]: + signature = inspect.signature(process) + return [ + p.name for p in signature.parameters.values() + if p.kind in (inspect.Parameter.POSITIONAL_ONLY, inspect.Parameter.POSITIONAL_OR_KEYWORD) + ] # TODO: autodetect the parameters defined by process? 
- if isinstance(process, str): - # Assume given reducer is a simple predefined reduce process_id - pg = PGNode( - process_id=process, - arguments=get_args(parameter_mapping, factory=lambda p: {"from_parameter": p}) - ) - elif isinstance(process, PGNode): + if isinstance(process, PGNode): # Assume this is already a valid callback process - # TODO: check used parameters against expected ones? - pg=process + pg = process + elif isinstance(process, str): + # Assume given reducer is a simple predefined reduce process_id + if process in openeo.processes.processes.__dict__: + process_params = get_parameter_names(openeo.processes.processes.__dict__[process]) + else: + # Best effort guess + process_params = parent_parameters + if parent_parameters == ["x", "y"] and (len(process_params) == 1 or process_params[:1] == ["data"]): + # Special case: wrap all parent parameters in an array + arguments = {process_params[0]: [{"from_parameter": p} for p in parent_parameters]} + else: + arguments = {a: {"from_parameter": b} for a, b in zip(process_params, parent_parameters)} + pg = PGNode(process_id=process, arguments=arguments) elif isinstance(process, typing.Callable): - args = get_args(parameter_mapping, factory=lambda p: ProcessBuilder.from_parameter(p)) - # Only call with kwargs to avoid being picky towards user about argument names when there is no confusion. 
- if len(args) < 2: - pg = process(*args.values()).pgnode + process_params = get_parameter_names(process) + if parent_parameters == ["x", "y"] and (len(process_params) == 1 or process_params[:1] == ["data"]): + # Special case: wrap all parent parameters in an array + arguments = [ProcessBuilder([{"from_parameter": p} for p in parent_parameters])] else: - pg = process(**args).pgnode + arguments = [ProcessBuilder({"from_parameter": p}) for p in parent_parameters] + + pg = process(*arguments).pgnode else: raise ValueError(process) @@ -582,7 +587,7 @@ def apply_dimension( process = PGNode.to_process_graph_argument(callback_process_node) elif code or process: # TODO EP-3555 unify `code` and `process` - process = self._get_callback(code or process, parameter_mapping={"data": "data"}) + process = self._get_callback(code or process, parent_parameters=["data"]) else: raise OpenEoClientException("No UDF code or process given") arguments = { @@ -609,7 +614,7 @@ def reduce_dimension( """ # TODO: check if dimension is valid according to metadata? #116 # TODO: #125 use/test case for `reduce_dimension_binary`? - reducer = self._get_callback(reducer, parameter_mapping={"data": "data"}) + reducer = self._get_callback(reducer, parent_parameters=["data"]) return self.process_with_node(ReduceNode( process_id=process_id, @@ -699,13 +704,13 @@ def apply_neighborhood( process_id='apply_neighborhood', arguments=dict_no_none( data=THIS, - process=self._get_callback(process, parameter_mapping={"data": "data"}), + process=self._get_callback(process, parent_parameters=["data"]), size=size, overlap=overlap ) ) - def apply(self, process: Union[str, PGNode, typing.Callable] = None, data_argument="x") -> 'DataCube': + def apply(self, process: Union[str, PGNode, typing.Callable] = None) -> 'DataCube': """ Applies a unary process (a local operation) to each value of the specified or all dimensions in the data cube. 
@@ -717,7 +722,7 @@ def apply(self, process: Union[str, PGNode, typing.Callable] = None, data_argume process_id="apply", arguments={ "data": THIS, - "process": self._get_callback(process, parameter_mapping={data_argument: "x"}), + "process": self._get_callback(process, parent_parameters=["x"]), # TODO #125 context } ) @@ -940,7 +945,7 @@ def merge_cubes( 'cube2': {'from_node': other._pg}, } if overlap_resolver: - arguments["overlap_resolver"] = self._get_callback(overlap_resolver, parameter_mapping={"data": ["x", "y"]}) + arguments["overlap_resolver"] = self._get_callback(overlap_resolver, parent_parameters=["x", "y"]) # TODO #125 context # TODO: set metadata of reduced cube? return self.process(process_id="merge_cubes", arguments=arguments) @@ -1061,7 +1066,7 @@ def _polygonal_timeseries( arguments={ "data": self._pg, "geometries": geometries, - "reducer": self._get_callback(func, parameter_mapping={"data":"data"}) + "reducer": self._get_callback(func, parent_parameters=["data"]) # TODO #125 target dimension, context } ) diff --git a/tests/data/1.0.0/merge_cubes_add.json b/tests/data/1.0.0/merge_cubes_add.json new file mode 100644 index 000000000..3e8107989 --- /dev/null +++ b/tests/data/1.0.0/merge_cubes_add.json @@ -0,0 +1,46 @@ +{ + "loadcollection1": { + "process_id": "load_collection", + "arguments": { + "spatial_extent": null, + "id": "S2", + "temporal_extent": null + } + }, + "loadcollection2": { + "process_id": "load_collection", + "arguments": { + "spatial_extent": null, + "id": "MASK", + "temporal_extent": null + } + }, + "mergecubes1": { + "process_id": "merge_cubes", + "arguments": { + "cube1": { + "from_node": "loadcollection1" + }, + "cube2": { + "from_node": "loadcollection2" + }, + "overlap_resolver": { + "process_graph": { + "add1": { + "process_id": "add", + "arguments": { + "x": { + "from_parameter": "x" + }, + "y": { + "from_parameter": "y" + } + }, + "result": true + } + } + } + }, + "result": true + } +} \ No newline at end of file diff --git 
a/tests/data/1.0.0/merge_cubes_multiple.json b/tests/data/1.0.0/merge_cubes_multiple.json index da77f3ba0..586794585 100644 --- a/tests/data/1.0.0/merge_cubes_multiple.json +++ b/tests/data/1.0.0/merge_cubes_multiple.json @@ -57,10 +57,10 @@ "process_id": "add", "arguments": { "x": { - "from_parameter": "cube1" + "from_parameter": "x" }, "y": { - "from_parameter": "cube2" + "from_parameter": "y" } }, "result": true @@ -84,10 +84,10 @@ "process_id": "add", "arguments": { "x": { - "from_parameter": "cube1" + "from_parameter": "x" }, "y": { - "from_parameter": "cube2" + "from_parameter": "y" } }, "result": true diff --git a/tests/data/1.0.0/merge_cubes_or.json b/tests/data/1.0.0/merge_cubes_or.json index 2d12a83ed..753042576 100644 --- a/tests/data/1.0.0/merge_cubes_or.json +++ b/tests/data/1.0.0/merge_cubes_or.json @@ -110,10 +110,10 @@ "process_id": "or", "arguments": { "x": { - "from_parameter": "cube1" + "from_parameter": "x" }, "y": { - "from_parameter": "cube2" + "from_parameter": "y" } }, "result": true diff --git a/tests/rest/datacube/test_processbuilder.py b/tests/rest/datacube/test_processbuilder.py index 2ded8ace2..a0587c5a5 100644 --- a/tests/rest/datacube/test_processbuilder.py +++ b/tests/rest/datacube/test_processbuilder.py @@ -4,6 +4,18 @@ from ... 
import load_json_resource +def test_apply_callback_absolute_str(con100): + im = con100.load_collection("S2") + result = im.apply("absolute") + assert result.graph == load_json_resource('data/1.0.0/apply_absolute.json') + + +def test_apply_callback_absolute_pgnode(con100): + im = con100.load_collection("S2") + result = im.apply(PGNode("absolute", x={"from_parameter": "x"})) + assert result.graph == load_json_resource('data/1.0.0/apply_absolute.json') + + def test_apply_callback_absolute_lambda_method(con100): im = con100.load_collection("S2") result = im.apply(lambda data: data.absolute()) @@ -256,10 +268,73 @@ def test_reduce_dimension_max_lambda(con100): def test_reduce_dimension_bandmath_lambda(con100): from openeo.processes.processes import array_element - collection = con100.load_collection("S2") im = con100.load_collection("S2") - res = collection.reduce_dimension( + res = im.reduce_dimension( reducer=lambda data: array_element(data, index=1) + array_element(data, index=2), dimension='bands' ) assert res.graph == load_json_resource('data/1.0.0/reduce_dimension_bandmath.json') + + +def test_merge_cubes_add_str(con100): + im1 = con100.load_collection("S2") + im2 = con100.load_collection("MASK") + res = im1.merge_cubes(other=im2, overlap_resolver="add") + assert res.graph == load_json_resource('data/1.0.0/merge_cubes_add.json') + + +def test_merge_cubes_add_pgnode(con100): + im1 = con100.load_collection("S2") + im2 = con100.load_collection("MASK") + res = im1.merge_cubes( + other=im2, + overlap_resolver=PGNode("add", x={"from_parameter": "x"}, y={"from_parameter": "y"}) + ) + assert res.graph == load_json_resource('data/1.0.0/merge_cubes_add.json') + + +def test_merge_cubes_add_callable(con100): + im1 = con100.load_collection("S2") + im2 = con100.load_collection("MASK") + from openeo.processes.processes import add + res = im1.merge_cubes(other=im2, overlap_resolver=add) + assert res.graph == load_json_resource('data/1.0.0/merge_cubes_add.json') + + +def 
test_merge_cubes_add_lambda(con100): + im1 = con100.load_collection("S2") + im2 = con100.load_collection("MASK") + res = im1.merge_cubes(other=im2, overlap_resolver=lambda x, y: x + y) + assert res.graph == load_json_resource('data/1.0.0/merge_cubes_add.json') + + +def test_merge_cubes_max_str(con100): + im1 = con100.load_collection("S2") + im2 = con100.load_collection("MASK") + res = im1.merge_cubes(other=im2, overlap_resolver="max") + assert res.graph == load_json_resource('data/1.0.0/merge_cubes_max.json') + + +def test_merge_cubes_max_pgnode(con100): + im1 = con100.load_collection("S2") + im2 = con100.load_collection("MASK") + res = im1.merge_cubes( + other=im2, + overlap_resolver=PGNode("max", data=[{"from_parameter": "x"}, {"from_parameter": "y"}]) + ) + assert res.graph == load_json_resource('data/1.0.0/merge_cubes_max.json') + + +def test_merge_cubes_max_callable(con100): + im1 = con100.load_collection("S2") + im2 = con100.load_collection("MASK") + from openeo.processes.processes import max + res = im1.merge_cubes(other=im2, overlap_resolver=max) + assert res.graph == load_json_resource('data/1.0.0/merge_cubes_max.json') + + +def test_merge_cubes_max_lambda(con100): + im1 = con100.load_collection("S2") + im2 = con100.load_collection("MASK") + res = im1.merge_cubes(other=im2, overlap_resolver=lambda data: data.max()) + assert res.graph == load_json_resource('data/1.0.0/merge_cubes_max.json') From 31b899ac83f4ec1c8a11623caffcc953400e6ec1 Mon Sep 17 00:00:00 2001 From: Stefaan Lippens Date: Mon, 14 Sep 2020 10:39:32 +0200 Subject: [PATCH 7/9] EP-3555 fix handling of processes with "python keyword" name --- openeo/processes/generator.py | 9 ++++--- openeo/processes/processes.py | 8 +++---- tests/processes/test_generator.py | 40 +++++++++++++++++++++++++++++++ 3 files changed, 50 insertions(+), 7 deletions(-) diff --git a/openeo/processes/generator.py b/openeo/processes/generator.py index e9faa4dcb..6ddd47751 100644 --- a/openeo/processes/generator.py +++ 
b/openeo/processes/generator.py @@ -1,4 +1,5 @@ import argparse +import keyword import sys import textwrap from pathlib import Path @@ -36,7 +37,9 @@ def render_process(self, process: Process, prefix: str = None, width: int = DEFA call_args = ", ".join( ["{p}={a}".format(p=p, a=a) for (p, a) in zip(self._par_names(process), self._arg_names(process))] ) - body = self.indent + self.body_template.format(id=self._safe_name(process.id), args=call_args) + body = self.indent + self.body_template.format( + id=process.id, safe_name=self._safe_name(process.id), args=call_args + ) return textwrap.indent("\n".join([ def_line, @@ -45,7 +48,7 @@ def render_process(self, process: Process, prefix: str = None, width: int = DEFA ]), prefix=prefix) def _safe_name(self, name: str) -> str: - if name in {'and', 'or', 'if', 'not'}: + if keyword.iskeyword(name): name += '_' return name @@ -128,7 +131,7 @@ def __pow__(self, other) -> 'ProcessBuilder': ) oo_renderer = PythonRenderer( oo_mode=True, - body_template="return {id}({args})", + body_template="return {safe_name}({args})", optional_default="UNSET", return_type_hint="'ProcessBuilder'" ) diff --git a/openeo/processes/processes.py b/openeo/processes/processes.py index 32db9740c..95bdcdee1 100644 --- a/openeo/processes/processes.py +++ b/openeo/processes/processes.py @@ -1942,7 +1942,7 @@ def and_(x, y) -> ProcessBuilder: :return: Boolean result of the logical AND. """ - return process('and_', x=x, y=y) + return process('and', x=x, y=y) def anomaly(data, normals, period) -> ProcessBuilder: @@ -2691,7 +2691,7 @@ def if_(value, accept, reject=UNSET) -> ProcessBuilder: :return: Either the `accept` or `reject` argument depending on the given boolean value. """ - return process('if_', value=value, accept=accept, reject=reject) + return process('if', value=value, accept=accept, reject=reject) def int(x) -> ProcessBuilder: @@ -3074,7 +3074,7 @@ def not_(x) -> ProcessBuilder: :return: Inverted boolean value. 
""" - return process('not_', x=x) + return process('not', x=x) def or_(x, y) -> ProcessBuilder: @@ -3086,7 +3086,7 @@ def or_(x, y) -> ProcessBuilder: :return: Boolean result of the logical OR. """ - return process('or_', x=x, y=y) + return process('or', x=x, y=y) def order(data, asc=UNSET, nodata=UNSET) -> ProcessBuilder: diff --git a/tests/processes/test_generator.py b/tests/processes/test_generator.py index f04f55512..5395e8309 100644 --- a/tests/processes/test_generator.py +++ b/tests/processes/test_generator.py @@ -145,3 +145,43 @@ def pi(self): :return: value of pi """ return process('pi', )''') + + +def test_render_keyword(): + process = Process.from_dict({ + "id": "or", + "description": "Boolean and", + "summary": "Boolean and", + "parameters": [ + {"name": "x", "description": "value", "schema": {"type": ["boolean", "null"]}}, + {"name": "y", "description": "value", "schema": {"type": ["boolean", "null"]}} + ], + "returns": {"description": "result", "schema": {"type": ["boolean", "null"]}}, + }) + renderer = PythonRenderer() + src = renderer.render_process(process) + assert src == dedent('''\ + def or_(x, y): + """ + Boolean and + + :param x: value + :param y: value + + :return: result + """ + return process('or', x=x, y=y)''') + + oo_renderer = PythonRenderer(oo_mode=True, body_template="return {safe_name}({args})", ) + src = oo_renderer.render_process(process) + assert dedent(src) == dedent('''\ + def or_(self, y): + """ + Boolean and + + :param self: value + :param y: value + + :return: result + """ + return or_(x=self, y=y)''') From c2a5de8dd8773f02c5c4db13a51e0f6e7cfec2f6 Mon Sep 17 00:00:00 2001 From: Stefaan Lippens Date: Mon, 14 Sep 2020 15:54:37 +0200 Subject: [PATCH 8/9] EP-3555 add more documentation about callbacks --- docs/basics.rst | 27 --- docs/processes.rst | 198 +++++++++++++++++++++ tests/rest/datacube/test_processbuilder.py | 10 ++ 3 files changed, 208 insertions(+), 27 deletions(-) diff --git a/docs/basics.rst b/docs/basics.rst index 
8efa10b68..2ee27eea0 100644 --- a/docs/basics.rst +++ b/docs/basics.rst @@ -157,30 +157,3 @@ It can easily be converted into a pandas dataframe: The same method also works for multiple polygons, or GeoJSON or SHP files that are accessible by the backend. This allows computing aggregated values over very large areas. - -Some openEO process allow you to specify a process to be invoked on a subset of the datacube. This library allows this by specifying -a 'callback' function. - - -.. _callbackfunctions: - -Callback functions ------------------- - -A callback function is created by defining an actual Python function, or a lambda expression. This function is then passed on -to processes such as :py:meth:`openeo.rest.datacube.DataCube.apply`, :py:meth:`openeo.rest.datacube.DataCube.apply_dimension`, :py:meth:`openeo.rest.datacube.DataCube.apply_neighborhood`, -and :py:meth:`openeo.rest.datacube.DataCube.reduce_dimension`. - -This is an example: - -.. code-block:: python - - datacube_absolute = cube.apply(process=lambda data:absolute(data)) - -For more complex operations, you can define a function: - -.. code-block:: python - - def callback(data): - return absolute(data) - diff --git a/docs/processes.rst b/docs/processes.rst index 5286176cb..418f18f6e 100644 --- a/docs/processes.rst +++ b/docs/processes.rst @@ -240,3 +240,201 @@ The parameter listing of the example above could be written like this:: ] + +.. _callbackfunctions: + +Processes with "callbacks" +========================== + +Some openEO processes expect some kind of sub-process +to be invoked on a subset or slice of the datacube. 
+For example: + +* process ``apply`` requires a transformation that will be applied + to each pixel in the cube (separately) +* process ``reduce_dimension`` requires an aggregation function to convert + an array of pixel values (along a given dimension) to a single value +* process ``apply_neighborhood`` requires a function to transform a small + "neighborhood" cube to another + +These transformation functions are usually called "**callbacks**" +because instead of being called explicitly by the user, +they are called by their "parent" process +(the ``apply``, ``reduce_dimension`` and ``apply_neighborhood`` in the examples) + + +The openEO Python Client Library currently provides a couple of functions +that expect a callback, including: +:py:meth:`openeo.rest.datacube.DataCube.apply`, +:py:meth:`openeo.rest.datacube.DataCube.apply_dimension`, +:py:meth:`openeo.rest.datacube.DataCube.apply_neighborhood`, +:py:meth:`openeo.rest.datacube.DataCube.merge_cubes`, +:py:meth:`openeo.rest.datacube.DataCube.reduce_dimension`, +and :py:meth:`openeo.rest.datacube.DataCube.load_collection`. +These functions support several ways to specify the desired callback. + + +Callback as string +------------------ + +The easiest way is passing a process name as a string, +for example: + +.. code-block:: python + + # Take the absolute value of each pixel + cube.apply("absolute") + + # Reduce a cube along the temporal dimension by taking the maximum value + cube.reduce_dimension("max", dimension="t") + +This approach is only possible if the desired transformation is available +as a single process. If not, use one of the methods below. + +Also important is that the "signature" of the provided callback process +should correspond properly with what the parent process expects. 
+For example: ``apply`` requires a callback process that receives a +number and returns one (like ``absolute`` or ``sqrt``), +while ``reduce_dimension`` requires a callback process that receives +an array of numbers and returns a single number (like ``max`` or ``mean``). + + +Callback as a callable +----------------------- + +You can also specify the callback as a "callable": +a Python object that can be called (e.g. a function without parentheses). + +The openEO Python Client Library defines the +official processes in the :py:mod:`openeo.process.processes` module, +which can be used directly: + +.. code-block:: python + + from openeo.processes.processes import absolute, max + + cube.apply(absolute) + cube.reduce_dimension(max, dimension="t") + +You can also use ``lambda`` functions: + +.. code-block:: python + + cube.apply(lambda x: x * 2 + 3) + + +or normal Python functions: + +.. code-block:: python + + from openeo.processes.processes import array_element + + def my_bandmath(data): + band1 = array_element(data, index=1) + band2 = array_element(data, index=2) + return band1 + 1.2 * band2 + + + cube.reduce_dimension(my_bandmath, dimension="bands") + + +The argument that is passed to these functions is +an instance of :py:class:`openeo.processes.processes.ProcessBuilder`. +This is a helper object with predefined methods for all standard processes, +allowing to use an object oriented coding style to define the callback. +For example: + +.. code-block:: python + + from openeo.processes.processes import ProcessBuilder + + def avg(data: ProcessBuilder): + return data.mean() + + cube.reduce_dimension(avg, dimension="t") + + +These methods also return ``ProcessBuilder`` objects, +which also allows writing callbacks in chained fashion: + +.. code-block:: python + + cube.apply(lambda x: x.absolute().cos().add(y=1.23)) + + +All this gives a lot of flexibility to define callbacks compactly +in a desired coding style.
+The following examples result in the same callback: + +.. code-block:: python + + from openeo.processes.processes import ProcessBuilder, mean, cos, add + + # Chained methods + cube.reduce_dimension( + lambda data: data.mean().cos().add(y=1.23), + dimension="t" + ) + + # Functions + cube.reduce_dimension( + lambda data: add(x=cos(mean(data)), y=1.23), + dimension="t" + ) + + # Mixing methods, functions and operators + cube.reduce_dimension( + lambda data: cos(data.mean()) + 1.23, + dimension="t" + ) + + +Caveats +```````` + +Specifying callbacks through Python functions (or lambdas) +looks intuitive and straightforward, but it should be noted +that not everything is allowed in these functions. +You should just limit yourself to calling +:py:mod:`openeo.process.processes` functions, :py:class:`openeo.processes.processes.ProcessBuilder` methods and basic math operators. +Don't call functions from other libraries like numpy or scipy. +Don't use Python control flow statements like ``if/else`` constructs +or ``for`` loops. + +The reason for this is that the openEO Python Client Library +does not translate the function source code itself +to an openEO process graph. +Instead, when building the openEO process graph, +it passes a special object to the function +and keeps track of which :py:mod:`openeo.process.processes` functions +were called to assemble the corresponding process graph. +If you use control flow statements or use numpy functions for example, +this procedure will incorrectly detect what you want to do in the callback. + + +Callback as ``PGNode`` +----------------------- + +You can also pass a ``PGNode`` object as callback. +This method is used internally and could be useful for more +advanced use cases, but it requires more in-depth knowledge of +the openEO API and openEO Python Client Library to construct correctly. +Some examples: + +..
code-block:: python + + from openeo.internal.graph_building import PGNode + + cube.apply(PGNode( + "add", + x=PGNode( + "cos", + x=PGNode("absolute", x={"from_parameter": "x"}) + ), + y=1.23 + )) + + cube.reduce_dimension( + reducer=PGNode("max", data={"from_parameter": "data"}), + dimension="bands" + ) diff --git a/tests/rest/datacube/test_processbuilder.py b/tests/rest/datacube/test_processbuilder.py index a0587c5a5..a7465a882 100644 --- a/tests/rest/datacube/test_processbuilder.py +++ b/tests/rest/datacube/test_processbuilder.py @@ -89,6 +89,16 @@ def transform(x: ProcessBuilder) -> ProcessBuilder: assert result.graph == load_json_resource('data/1.0.0/apply_chain.json') +def test_apply_callback_chain_pgnode(con100): + im = con100.load_collection("S2") + result = im.apply(PGNode( + "add", + x=PGNode("cos", x=PGNode("absolute", x={"from_parameter": "x"})), + y=1.23 + )) + assert result.graph == load_json_resource('data/1.0.0/apply_chain.json') + + def test_apply_callback_math_lambda(con100): im = con100.load_collection("S2") result = im.apply(lambda data: (((data + 1) - 2) * 3) / 4) From 0fc03ab509092435d39eff5e98df20a7e6f2a98e Mon Sep 17 00:00:00 2001 From: Stefaan Lippens Date: Thu, 17 Sep 2020 12:09:01 +0200 Subject: [PATCH 9/9] EP-3555 restructure pubic processes.py vs internal helper modules --- docs/processes.rst | 12 ++++---- openeo/{ => internal}/processes/__init__.py | 0 openeo/{ => internal}/processes/builder.py | 0 openeo/{ => internal}/processes/generator.py | 4 +-- openeo/{ => internal}/processes/parse.py | 0 openeo/{processes => }/processes.py | 2 +- openeo/rest/datacube.py | 10 +++---- tests/{ => internal}/processes/__init__.py | 0 .../processes/test_generator.py | 4 +-- tests/{ => internal}/processes/test_parse.py | 2 +- tests/rest/datacube/test_datacube100.py | 2 +- tests/rest/datacube/test_processbuilder.py | 28 +++++++++---------- 12 files changed, 32 insertions(+), 32 deletions(-) rename openeo/{ => internal}/processes/__init__.py (100%) rename 
openeo/{ => internal}/processes/builder.py (100%) rename openeo/{ => internal}/processes/generator.py (97%) rename openeo/{ => internal}/processes/parse.py (100%) rename openeo/{processes => }/processes.py (99%) rename tests/{ => internal}/processes/__init__.py (100%) rename tests/{ => internal}/processes/test_generator.py (97%) rename tests/{ => internal}/processes/test_parse.py (97%) diff --git a/docs/processes.rst b/docs/processes.rst index 418f18f6e..20a9cf285 100644 --- a/docs/processes.rst +++ b/docs/processes.rst @@ -311,7 +311,7 @@ which can be used directly: .. code-block:: python - from openeo.processes.processes import absolute, max + from openeo.processes import absolute, max cube.apply(absolute) cube.reduce_dimension(max, dimension="t") @@ -327,7 +327,7 @@ or normal Python functions: .. code-block:: python - from openeo.processes.processes import array_element + from openeo.processes import array_element def my_bandmath(data): band1 = array_element(data, index=1) @@ -339,14 +339,14 @@ or normal Python functions: The argument that is passed to these functions is -an instance of :py:class:`openeo.processes.processes.ProcessBuilder`. +an instance of :py:class:`openeo.processes.ProcessBuilder`. This is a helper object with predefined methods for all standard processes, allowing to use an object oriented coding style to define the callback. For example: .. code-block:: python - from openeo.processes.processes import ProcessBuilder + from openeo.processes import ProcessBuilder def avg(data: ProcessBuilder): return data.mean() @@ -368,7 +368,7 @@ The following examples result in the same callback: .. 
code-block:: python - from openeo.processes.processes import ProcessBuilder, mean, cos, add + from openeo.processes import ProcessBuilder, mean, cos, add # Chained methods cube.reduce_dimension( @@ -396,7 +396,7 @@ Specifying callbacks through Python functions (or lambdas) looks intuitive and straightforward, but it should be noted that not everything is allowed in these functions. You should just limit yourself to calling -:py:mod:`openeo.process.processes` functions, :py:class:`openeo.processes.processes.ProcessBuilder` methods and basic math operators. +:py:mod:`openeo.process.processes` functions, :py:class:`openeo.processes.ProcessBuilder` methods and basic math operators. Don't call functions from other libraries like numpy or scipy. Don't use Python control flow statements like ``if/else`` constructs or ``for`` loops. diff --git a/openeo/processes/__init__.py b/openeo/internal/processes/__init__.py similarity index 100% rename from openeo/processes/__init__.py rename to openeo/internal/processes/__init__.py diff --git a/openeo/processes/builder.py b/openeo/internal/processes/builder.py similarity index 100% rename from openeo/processes/builder.py rename to openeo/internal/processes/builder.py diff --git a/openeo/processes/generator.py b/openeo/internal/processes/generator.py similarity index 97% rename from openeo/processes/generator.py rename to openeo/internal/processes/generator.py index 6ddd47751..59c828dbe 100644 --- a/openeo/processes/generator.py +++ b/openeo/internal/processes/generator.py @@ -5,7 +5,7 @@ from pathlib import Path from typing import Union, List, Iterator -from openeo.processes.parse import Process, parse_all_from_dir +from openeo.internal.processes.parse import Process, parse_all_from_dir class PythonRenderer: @@ -148,7 +148,7 @@ def __pow__(self, other) -> 'ProcessBuilder': def main(): # Usage example (from project root, assuming the `openeo-process` repo is checked out as well): - # python openeo/processes//generator.py 
../openeo-processes --output openeo/processes/processes.py + # python openeo/internal/processes/generator.py ../openeo-processes --output openeo/processes.py arg_parser = argparse.ArgumentParser() arg_parser.add_argument("dir", help="""Directory that holds openEO process definitions in JSON format""") arg_parser.add_argument("--output", help="Path to output 'processes.py' file") diff --git a/openeo/processes/parse.py b/openeo/internal/processes/parse.py similarity index 100% rename from openeo/processes/parse.py rename to openeo/internal/processes/parse.py diff --git a/openeo/processes/processes.py b/openeo/processes.py similarity index 99% rename from openeo/processes/processes.py rename to openeo/processes.py index 95bdcdee1..827f17b6b 100644 --- a/openeo/processes/processes.py +++ b/openeo/processes.py @@ -2,7 +2,7 @@ # This file is automatically generated. # Do not edit directly. -from openeo.processes.builder import ProcessBuilderBase, UNSET +from openeo.internal.processes.builder import ProcessBuilderBase, UNSET class ProcessBuilder(ProcessBuilderBase): diff --git a/openeo/rest/datacube.py b/openeo/rest/datacube.py index 77b40600a..4d3d7f760 100644 --- a/openeo/rest/datacube.py +++ b/openeo/rest/datacube.py @@ -18,19 +18,19 @@ import shapely.geometry import shapely.geometry.base from deprecated import deprecated -from openeo.processes.processes import ProcessBuilder from shapely.geometry import Polygon, MultiPolygon, mapping import openeo +import openeo.processes from openeo.imagecollection import ImageCollection, CollectionMetadata from openeo.internal.graph_building import PGNode, ReduceNode +from openeo.metadata import Band +from openeo.processes import ProcessBuilder from openeo.rest import BandMathException, OperatorException, OpenEoClientException from openeo.rest.job import RESTJob from openeo.rest.udp import RESTUserDefinedProcess from openeo.util import get_temporal_extent, dict_no_none from openeo.vectorcube import VectorCube -import 
openeo.processes.processes -from openeo.metadata import Band import numpy from builtins import staticmethod @@ -520,8 +520,8 @@ def get_parameter_names(process: typing.Callable) -> List[str]: pg = process elif isinstance(process, str): # Assume given reducer is a simple predefined reduce process_id - if process in openeo.processes.processes.__dict__: - process_params = get_parameter_names(openeo.processes.processes.__dict__[process]) + if process in openeo.processes.__dict__: + process_params = get_parameter_names(openeo.processes.__dict__[process]) else: # Best effort guess process_params = parent_parameters diff --git a/tests/processes/__init__.py b/tests/internal/processes/__init__.py similarity index 100% rename from tests/processes/__init__.py rename to tests/internal/processes/__init__.py diff --git a/tests/processes/test_generator.py b/tests/internal/processes/test_generator.py similarity index 97% rename from tests/processes/test_generator.py rename to tests/internal/processes/test_generator.py index 5395e8309..dff5479d5 100644 --- a/tests/processes/test_generator.py +++ b/tests/internal/processes/test_generator.py @@ -1,7 +1,7 @@ from textwrap import dedent -from openeo.processes.generator import PythonRenderer -from openeo.processes.parse import Process +from openeo.internal.processes.generator import PythonRenderer +from openeo.internal.processes.parse import Process def test_render_basic(): diff --git a/tests/processes/test_parse.py b/tests/internal/processes/test_parse.py similarity index 97% rename from tests/processes/test_parse.py rename to tests/internal/processes/test_parse.py index 3753b5d8a..dd6f4e412 100644 --- a/tests/processes/test_parse.py +++ b/tests/internal/processes/test_parse.py @@ -1,4 +1,4 @@ -from openeo.processes.parse import Parameter, Schema, Returns, Process +from openeo.internal.processes.parse import Parameter, Schema, Returns, Process def test_schema(): diff --git a/tests/rest/datacube/test_datacube100.py 
b/tests/rest/datacube/test_datacube100.py index d175fc31d..758bb6976 100644 --- a/tests/rest/datacube/test_datacube100.py +++ b/tests/rest/datacube/test_datacube100.py @@ -280,7 +280,7 @@ def between(min, max) -> PGNode: def test_load_collection_properties_process_builder_function(con100): - from openeo.processes.processes import between, eq + from openeo.processes import between, eq im = con100.load_collection( "S2", spatial_extent={"west": 16.1, "east": 16.6, "north": 48.6, "south": 47.2}, diff --git a/tests/rest/datacube/test_processbuilder.py b/tests/rest/datacube/test_processbuilder.py index a7465a882..1a84f032a 100644 --- a/tests/rest/datacube/test_processbuilder.py +++ b/tests/rest/datacube/test_processbuilder.py @@ -1,5 +1,5 @@ from openeo.internal.graph_building import PGNode -from openeo.processes.processes import ProcessBuilder +from openeo.processes import ProcessBuilder from ... import load_json_resource @@ -24,7 +24,7 @@ def test_apply_callback_absolute_lambda_method(con100): def test_apply_callback_absolute_function(con100): im = con100.load_collection("S2") - from openeo.processes.processes import absolute + from openeo.processes import absolute result = im.apply(absolute) assert result.graph == load_json_resource('data/1.0.0/apply_absolute.json') @@ -46,14 +46,14 @@ def test_apply_callback_chain_lambda_method(con100): def test_apply_callback_chain_lambda_functions(con100): im = con100.load_collection("S2") - from openeo.processes.processes import absolute, cos, add + from openeo.processes import absolute, cos, add result = im.apply(lambda data: add(cos(absolute(data)), 1.23)) assert result.graph == load_json_resource('data/1.0.0/apply_chain.json') def test_apply_callback_chain_lambda_mixed_and_operator(con100): im = con100.load_collection("S2") - from openeo.processes.processes import cos + from openeo.processes import cos result = im.apply(lambda data: cos(data.absolute()) + 1.23) assert result.graph == 
load_json_resource('data/1.0.0/apply_chain.json') @@ -68,7 +68,7 @@ def transform(x: ProcessBuilder) -> ProcessBuilder: def test_apply_callback_chain_custom_function_functions(con100): - from openeo.processes.processes import absolute, cos, add + from openeo.processes import absolute, cos, add def transform(x: ProcessBuilder) -> ProcessBuilder: return add(cos(absolute(x)), y=1.23) @@ -79,7 +79,7 @@ def transform(x: ProcessBuilder) -> ProcessBuilder: def test_apply_callback_chain_custom_function_mixed_and_operator(con100): - from openeo.processes.processes import cos + from openeo.processes import cos def transform(x: ProcessBuilder) -> ProcessBuilder: return cos(x.absolute()) + 1.23 @@ -133,7 +133,7 @@ def test_apply_neighborhood_trim_pgnode(con100): def test_apply_neighborhood_trim_callable(con100): - from openeo.processes.processes import trim_cube + from openeo.processes import trim_cube im = con100.load_collection("S2") result = im.apply_neighborhood( process=trim_cube, @@ -187,7 +187,7 @@ def callback(data: ProcessBuilder): def test_apply_neighborhood_complex_callback(con100): collection = con100.load_collection("S2") - from openeo.processes.processes import max + from openeo.processes import max neighbors = collection.apply_neighborhood(process=lambda data: max(data).absolute(), size=[ {'dimension': 'x', 'value': 128, 'unit': 'px'}, {'dimension': 'y', 'value': 128, 'unit': 'px'} @@ -230,7 +230,7 @@ def test_apply_dimension_max_pgnode(con100): def test_apply_dimension_max_callable(con100): im = con100.load_collection("S2") - from openeo.processes.processes import max + from openeo.processes import max res = im.apply_dimension(process=max, dimension="bands") assert res.graph == load_json_resource('data/1.0.0/apply_dimension_max.json') @@ -242,7 +242,7 @@ def test_apply_dimension_max_lambda(con100): def test_apply_dimension_bandmath_lambda(con100): - from openeo.processes.processes import array_element + from openeo.processes import array_element im = 
con100.load_collection("S2") res = im.apply_dimension( process=lambda d: array_element(d, index=1) + array_element(d, index=2), @@ -265,7 +265,7 @@ def test_reduce_dimension_max_pgnode(con100): def test_reduce_dimension_max_callable(con100): im = con100.load_collection("S2") - from openeo.processes.processes import max + from openeo.processes import max res = im.reduce_dimension(reducer=max, dimension="bands") assert res.graph == load_json_resource('data/1.0.0/reduce_dimension_max.json') @@ -277,7 +277,7 @@ def test_reduce_dimension_max_lambda(con100): def test_reduce_dimension_bandmath_lambda(con100): - from openeo.processes.processes import array_element + from openeo.processes import array_element im = con100.load_collection("S2") res = im.reduce_dimension( reducer=lambda data: array_element(data, index=1) + array_element(data, index=2), @@ -306,7 +306,7 @@ def test_merge_cubes_add_pgnode(con100): def test_merge_cubes_add_callable(con100): im1 = con100.load_collection("S2") im2 = con100.load_collection("MASK") - from openeo.processes.processes import add + from openeo.processes import add res = im1.merge_cubes(other=im2, overlap_resolver=add) assert res.graph == load_json_resource('data/1.0.0/merge_cubes_add.json') @@ -338,7 +338,7 @@ def test_merge_cubes_max_pgnode(con100): def test_merge_cubes_max_callable(con100): im1 = con100.load_collection("S2") im2 = con100.load_collection("MASK") - from openeo.processes.processes import max + from openeo.processes import max res = im1.merge_cubes(other=im2, overlap_resolver=max) assert res.graph == load_json_resource('data/1.0.0/merge_cubes_max.json')