From 43eb04bc337a92cd5e858525a938bb9131cb1928 Mon Sep 17 00:00:00 2001 From: Stefaan Lippens Date: Mon, 14 Mar 2022 16:15:40 +0100 Subject: [PATCH] Issue #276 Deprecate `DataCube.send_job` in favor of `DataCube.create_job` Is more consistent internally and compared to other libraries/documentation --- CHANGELOG.md | 1 + docs/cookbook/sampling.rst | 2 +- examples/R_example.py | 2 +- examples/eodc_example.py | 2 +- examples/eurac_example.py | 2 +- examples/gee_example.py | 2 +- examples/gee_uc1_pol.py | 2 +- examples/gee_uc1_temp.py | 2 +- examples/mundialis_example.py | 2 +- examples/notebooks/EODC_Forum_2019/EODC.ipynb | 2 +- .../notebooks/EODC_Forum_2019/EURAC.ipynb | 2 +- examples/notebooks/EODC_Forum_2019/GEE.ipynb | 2 +- .../notebooks/EODC_Forum_2019/RBackend.ipynb | 2 +- examples/notebooks/EODC_Forum_2019/VITO.ipynb | 2 +- examples/notebooks/PoC_EODC.ipynb | 2 +- examples/notebooks/Sentinel2_NDVI_GEE.ipynb | 4 +- examples/notebooks/SimpleCompositor.ipynb | 2 +- examples/notebooks/UC1_GEE_Pol.ipynb | 2 +- examples/notebooks/UC1_GEE_Temp.ipynb | 2 +- .../openEO_Python_Sentinel2_EURAC.ipynb | 2 +- .../notebooks/openeo-terrascope-webinar.ipynb | 2 +- examples/vito_example.py | 2 +- openeo/imagecollection.py | 9 +++-- openeo/rest/connection.py | 1 - openeo/rest/datacube.py | 7 ++-- openeo/rest/imagecollectionclient.py | 6 ++- openeo/rest/job.py | 6 +++ openeo/rest/mlmodel.py | 5 +-- openeo/rest/vectorcube.py | 6 ++- tests/data/batch_job.json | 26 ------------ tests/rest/datacube/test_datacube100.py | 40 +++++++++++++++++++ tests/rest/datacube/test_mlmodel.py | 2 +- tests/test_batch_jobs.py | 33 --------------- 33 files changed, 89 insertions(+), 97 deletions(-) delete mode 100644 tests/data/batch_job.json delete mode 100644 tests/test_batch_jobs.py diff --git a/CHANGELOG.md b/CHANGELOG.md index 7e1b5f65b..b9159f684 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -21,6 +21,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Drop 
`ImageCollection` from `DataCube`'s class hierarchy. - This practically removes very old (pre-0.4.0) methods like `date_range_filter` and `bbox_filter` from `DataCube`. ([#100](https://github.com/Open-EO/openeo-python-client/issues/100), [#278](https://github.com/Open-EO/openeo-python-client/issues/278)) +- Deprecate `DataCube.send_job` in favor of `DataCube.create_job` for better consistency (internally and with other libraries) ([#276](https://github.com/Open-EO/openeo-python-client/issues/276)) ### Removed diff --git a/docs/cookbook/sampling.rst b/docs/cookbook/sampling.rst index 1dc25ef5e..0a799dea6 100644 --- a/docs/cookbook/sampling.rst +++ b/docs/cookbook/sampling.rst @@ -31,7 +31,7 @@ Combining all of this, results in the following sample code:: temporal_extent=["2020-05-01","2020-06-01"] ) s2_bands = s2_bands.filter_spatial("https://artifactory.vgt.vito.be/testdata-public/parcels/test_10.geojson") - job = s2_bands.send_job(title="Sentinel2", description="Sentinel-2 L2A bands",out_format="netCDF",sample_by_feature=True) + job = s2_bands.create_job(title="Sentinel2", description="Sentinel-2 L2A bands",out_format="netCDF",sample_by_feature=True) Sampling only works for batch jobs, because it results in multiple output files, which can not be conveniently transferred in a synchronous call. 
diff --git a/examples/R_example.py b/examples/R_example.py index 0ee915bb4..c56909c73 100644 --- a/examples/R_example.py +++ b/examples/R_example.py @@ -50,7 +50,7 @@ # Test Job -job = pg.send_job() +job = pg.create_job() print(job.job_id) print(job.start_job()) print (job.describe_job()) diff --git a/examples/eodc_example.py b/examples/eodc_example.py index 863b43117..140eb4f10 100644 --- a/examples/eodc_example.py +++ b/examples/eodc_example.py @@ -35,7 +35,7 @@ composite = timeseries.min_time() logging.debug(timeseries.to_json(indent=None)) -job = timeseries.send_job() +job = timeseries.create_job() logging.debug("{}".format(job.job_id)) status = job.queue() diff --git a/examples/eurac_example.py b/examples/eurac_example.py index 26f8bd108..aef935dd6 100644 --- a/examples/eurac_example.py +++ b/examples/eurac_example.py @@ -42,7 +42,7 @@ print(datacube.to_json()) # Submit your process graph as new batch job to back-end -job = datacube.send_job() +job = datacube.create_job() # Launch processing of submitted batch job if job.job_id: diff --git a/examples/gee_example.py b/examples/gee_example.py index dfc96363d..ede297ad2 100644 --- a/examples/gee_example.py +++ b/examples/gee_example.py @@ -50,7 +50,7 @@ # Send Job to backend -job = datacube.send_job() +job = datacube.create_job() print(job.describe_job()) # Wait for job to finish and download diff --git a/examples/gee_uc1_pol.py b/examples/gee_uc1_pol.py index b8d5d6bf7..0b009206a 100644 --- a/examples/gee_uc1_pol.py +++ b/examples/gee_uc1_pol.py @@ -52,7 +52,7 @@ print(datacube.to_json()) # Send Job to backend -job = datacube.send_job() +job = datacube.create_job() res = job.start_and_wait().download_results() for key, val in res.items(): diff --git a/examples/gee_uc1_temp.py b/examples/gee_uc1_temp.py index f783ec026..7074cbcc7 100644 --- a/examples/gee_uc1_temp.py +++ b/examples/gee_uc1_temp.py @@ -37,7 +37,7 @@ print(datacube.to_json()) # Send Job to backend -job = datacube.send_job() +job = 
datacube.create_job() job.start_and_wait().download_results() #print(job.job_id) #print(job.start_job()) diff --git a/examples/mundialis_example.py b/examples/mundialis_example.py index 38c401321..0561039d4 100644 --- a/examples/mundialis_example.py +++ b/examples/mundialis_example.py @@ -44,7 +44,7 @@ # datacube.download("/tmp/testfile.tiff", format="GeoTIFF") -job = datacube.send_job() +job = datacube.create_job() if job: print(job.job_id) print(job.start_job()) diff --git a/examples/notebooks/EODC_Forum_2019/EODC.ipynb b/examples/notebooks/EODC_Forum_2019/EODC.ipynb index b6b0d4d95..cd3ac6de2 100755 --- a/examples/notebooks/EODC_Forum_2019/EODC.ipynb +++ b/examples/notebooks/EODC_Forum_2019/EODC.ipynb @@ -262,7 +262,7 @@ ], "source": [ "# Sending the job to the backend\n", - "job = datacube.send_job()\n", + "job = datacube.create_job()\n", "job.start_job()\n", "job" ] diff --git a/examples/notebooks/EODC_Forum_2019/EURAC.ipynb b/examples/notebooks/EODC_Forum_2019/EURAC.ipynb index 0973f008d..dea171358 100644 --- a/examples/notebooks/EODC_Forum_2019/EURAC.ipynb +++ b/examples/notebooks/EODC_Forum_2019/EURAC.ipynb @@ -451,7 +451,7 @@ ], "source": [ "# Sending the job to the backend\n", - "job = datacube.send_job()\n", + "job = datacube.create_job()\n", "job.start_job()\n", "job" ] diff --git a/examples/notebooks/EODC_Forum_2019/GEE.ipynb b/examples/notebooks/EODC_Forum_2019/GEE.ipynb index b92939ee5..4769e9185 100644 --- a/examples/notebooks/EODC_Forum_2019/GEE.ipynb +++ b/examples/notebooks/EODC_Forum_2019/GEE.ipynb @@ -734,7 +734,7 @@ ], "source": [ "# Sending the job to the backend\n", - "job = datacube.send_job()\n", + "job = datacube.create_job()\n", "job.start_job()\n", "job" ] diff --git a/examples/notebooks/EODC_Forum_2019/RBackend.ipynb b/examples/notebooks/EODC_Forum_2019/RBackend.ipynb index a02eb245b..2b8db1fd4 100644 --- a/examples/notebooks/EODC_Forum_2019/RBackend.ipynb +++ b/examples/notebooks/EODC_Forum_2019/RBackend.ipynb @@ -471,7 +471,7 @@ ], 
"source": [ "# Sending the job to the backend\n", - "job = datacube.send_job()\n", + "job = datacube.create_job()\n", "job.start_job()\n", "job" ] diff --git a/examples/notebooks/EODC_Forum_2019/VITO.ipynb b/examples/notebooks/EODC_Forum_2019/VITO.ipynb index 15b78c269..c3b7f3834 100644 --- a/examples/notebooks/EODC_Forum_2019/VITO.ipynb +++ b/examples/notebooks/EODC_Forum_2019/VITO.ipynb @@ -763,7 +763,7 @@ ], "source": [ "# Sending the job to the backend\n", - "job = datacube.send_job()\n", + "job = datacube.create_job()\n", "job.start_job()\n", "job" ] diff --git a/examples/notebooks/PoC_EODC.ipynb b/examples/notebooks/PoC_EODC.ipynb index bfa38822a..d7182711e 100755 --- a/examples/notebooks/PoC_EODC.ipynb +++ b/examples/notebooks/PoC_EODC.ipynb @@ -288,7 +288,7 @@ ], "source": [ "# Sending the job to the backend\n", - "job = timeseries.send_job()\n", + "job = timeseries.create_job()\n", "\n", "job" ] diff --git a/examples/notebooks/Sentinel2_NDVI_GEE.ipynb b/examples/notebooks/Sentinel2_NDVI_GEE.ipynb index 7c759d12a..d918264eb 100644 --- a/examples/notebooks/Sentinel2_NDVI_GEE.ipynb +++ b/examples/notebooks/Sentinel2_NDVI_GEE.ipynb @@ -2117,7 +2117,7 @@ "traceback": [ "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", "\u001b[0;31mJobFailedException\u001b[0m Traceback (most recent call last)", - "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[1;32m 1\u001b[0m \u001b[0;31m# Sending the job to the backend\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2\u001b[0m \u001b[0mjob\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mdatacube\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0msend_job\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 3\u001b[0;31m 
\u001b[0mjob\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mstart_and_wait\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mdownload_results\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mOUTPUT_FILE\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 4\u001b[0m \u001b[0mjob\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[1;32m 1\u001b[0m \u001b[0;31m# Sending the job to the backend\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2\u001b[0m \u001b[0mjob\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mdatacube\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mcreate_job\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 3\u001b[0;31m \u001b[0mjob\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mstart_and_wait\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mdownload_results\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mOUTPUT_FILE\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 4\u001b[0m \u001b[0mjob\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", "\u001b[0;32m/data/REPO/openeo-python-client/openeo/rest/job.py\u001b[0m in \u001b[0;36mstart_and_wait\u001b[0;34m(self, print, max_poll_interval, connection_retry_interval)\u001b[0m\n\u001b[1;32m 222\u001b[0m raise JobFailedException(\"Batch job {i} didn't finish properly. 
Status: {s} (after {t}).\".format(\n\u001b[1;32m 223\u001b[0m \u001b[0mi\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mjob_id\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0ms\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mstatus\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mt\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0melapsed\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 224\u001b[0;31m ), job=self)\n\u001b[0m\u001b[1;32m 225\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 226\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", "\u001b[0;31mJobFailedException\u001b[0m: Batch job XfUBRlYFgKe3SBvA didn't finish properly. Status: error (after 0:00:11.604513)." ] @@ -2125,7 +2125,7 @@ ], "source": [ "# Sending the job to the backend\n", - "job = datacube.send_job()\n", + "job = datacube.create_job()\n", "job.start_and_wait().download_results(OUTPUT_FILE)\n", "job" ] diff --git a/examples/notebooks/SimpleCompositor.ipynb b/examples/notebooks/SimpleCompositor.ipynb index 2b9802fc9..2fa13b19e 100644 --- a/examples/notebooks/SimpleCompositor.ipynb +++ b/examples/notebooks/SimpleCompositor.ipynb @@ -1455,7 +1455,7 @@ } ], "source": [ - "composite_job = composite.save_result(format='gtiff').send_job()\n", + "composite_job = composite.save_result(format='gtiff').create_job()\n", "composite_job.start_and_wait().get_results().download_file(\"./composite.tiff\")" ] }, diff --git a/examples/notebooks/UC1_GEE_Pol.ipynb b/examples/notebooks/UC1_GEE_Pol.ipynb index f8f2fb462..3ba1fe05e 100644 --- a/examples/notebooks/UC1_GEE_Pol.ipynb +++ b/examples/notebooks/UC1_GEE_Pol.ipynb @@ -232,7 +232,7 @@ ], "source": [ "# Sending the job to the backend\n", - "job = datacube.send_job()\n", + "job = datacube.create_job()\n", "results = job.start_and_wait().download_results()\n", "results" ] diff --git a/examples/notebooks/UC1_GEE_Temp.ipynb b/examples/notebooks/UC1_GEE_Temp.ipynb index fc8820525..fea1ff3d1 100644 --- 
a/examples/notebooks/UC1_GEE_Temp.ipynb +++ b/examples/notebooks/UC1_GEE_Temp.ipynb @@ -223,7 +223,7 @@ ], "source": [ "# Sending the job to the backend\n", - "job = datacube.send_job()\n", + "job = datacube.create_job()\n", "results = job.start_and_wait().download_results()\n", "results" ] diff --git a/examples/notebooks/openEO_Python_Sentinel2_EURAC.ipynb b/examples/notebooks/openEO_Python_Sentinel2_EURAC.ipynb index c1b321542..6eed2a039 100644 --- a/examples/notebooks/openEO_Python_Sentinel2_EURAC.ipynb +++ b/examples/notebooks/openEO_Python_Sentinel2_EURAC.ipynb @@ -299,7 +299,7 @@ "outputs": [], "source": [ "# submit your process graph as new batch job to back-end\n", - "job = datacube.send_job()" + "job = datacube.create_job()" ] }, { diff --git a/examples/notebooks/openeo-terrascope-webinar.ipynb b/examples/notebooks/openeo-terrascope-webinar.ipynb index d7f8bf545..2f23abb89 100644 --- a/examples/notebooks/openeo-terrascope-webinar.ipynb +++ b/examples/notebooks/openeo-terrascope-webinar.ipynb @@ -1201,7 +1201,7 @@ "metadata": {}, "outputs": [], "source": [ - "job = masked_timeseries.send_job()" + "job = masked_timeseries.create_job()" ] }, { diff --git a/examples/vito_example.py b/examples/vito_example.py index cf564b0c5..c4960bd84 100644 --- a/examples/vito_example.py +++ b/examples/vito_example.py @@ -38,7 +38,7 @@ datacube.download("/tmp/testfile.tiff") -job = datacube.send_job() +job = datacube.create_job() if job: print(job.job_id) print(job.run_synchronous("/tmp/testfile")) diff --git a/openeo/imagecollection.py b/openeo/imagecollection.py index 8ddccbb50..e23619c4c 100644 --- a/openeo/imagecollection.py +++ b/openeo/imagecollection.py @@ -9,8 +9,7 @@ from openeo.rest.job import RESTJob from openeo.rest.service import Service -from openeo.util import get_temporal_extent, first_not_none, dict_no_none - +from openeo.util import get_temporal_extent, first_not_none, dict_no_none, legacy_alias if hasattr(typing, 'TYPE_CHECKING') and typing.TYPE_CHECKING: # 
Imports for type checking only (circular import issue at runtime). `hasattr` is Python 3.5 workaround #210 @@ -539,7 +538,7 @@ def execute_batch( This method is mostly recommended if the batch job is expected to run in a reasonable amount of time. For very long running jobs, you probably do not want to keep the client running. In that case, using - :func:`~openeo.imagecollection.ImageCollection.send_job` might be more appropriate. + :func:`~openeo.imagecollection.ImageCollection.create_job` might be more appropriate. :param job_options: A dictionary containing (custom) job options :param outputfile: The path of a file to which a result can be written @@ -549,7 +548,7 @@ def execute_batch( """ pass - def send_job(self, out_format:str=None, job_options:Dict=None, **format_options) -> RESTJob: + def create_job(self, out_format:str=None, job_options:Dict=None, **format_options) -> RESTJob: """ Sends a job to the backend and returns a RESTJob instance. The job will still need to be started and managed explicitly. The :func:`~openeo.imagecollection.ImageCollection.execute_batch` method allows you to run batch jobs without managing it. @@ -561,6 +560,8 @@ def send_job(self, out_format:str=None, job_options:Dict=None, **format_options) """ pass + send_job = legacy_alias(create_job, name="send_job") + def pipe(self, func: Callable, *args, **kwargs): """ Pipe the image collection through a function and return the result. diff --git a/openeo/rest/connection.py b/openeo/rest/connection.py index d8fb5a597..0a5305e49 100644 --- a/openeo/rest/connection.py +++ b/openeo/rest/connection.py @@ -1048,7 +1048,6 @@ def create_job( :return: job_id: String Job id of the new created job """ # TODO move all this (RESTJob factory) logic to RESTJob? - # TODO: unify Connection.create_job vs DataCube.send_job. 
#276 req = self._build_request_with_process_graph( process_graph=process_graph, **dict_no_none(title=title, description=description, plan=plan, budget=budget) diff --git a/openeo/rest/datacube.py b/openeo/rest/datacube.py index c95d2d768..212cde8fd 100644 --- a/openeo/rest/datacube.py +++ b/openeo/rest/datacube.py @@ -1543,13 +1543,13 @@ def execute_batch( :param format_options: String Parameters for the job result format """ - job = self.send_job(out_format, job_options=job_options, **format_options) + job = self.create_job(out_format, job_options=job_options, **format_options) return job.run_synchronous( outputfile=outputfile, print=print, max_poll_interval=max_poll_interval, connection_retry_interval=connection_retry_interval ) - def send_job( + def create_job( self, out_format=None, title: str = None, description: str = None, plan: str = None, budget=None, job_options=None, **format_options ) -> RESTJob: @@ -1563,7 +1563,6 @@ def send_job( :return: status: Job resulting job. """ # TODO: add option to also automatically start the job? - # TODO: unify Connection.create_job vs DataCube.send_job. 
#276 img = self if out_format: # add `save_result` node @@ -1573,6 +1572,8 @@ def send_job( title=title, description=description, plan=plan, budget=budget, additional=job_options ) + send_job = legacy_alias(create_job, name="send_job") + def save_user_defined_process( self, user_defined_process_id: str, diff --git a/openeo/rest/imagecollectionclient.py b/openeo/rest/imagecollectionclient.py index d5816a2ee..c9f5137f6 100644 --- a/openeo/rest/imagecollectionclient.py +++ b/openeo/rest/imagecollectionclient.py @@ -1003,14 +1003,14 @@ def execute_batch( :param format_options: String Parameters for the job result format """ - job = self.send_job(out_format, job_options=job_options, **format_options) + job = self.create_job(out_format, job_options=job_options, **format_options) return job.run_synchronous( # TODO #135 support multi file result sets too outputfile=outputfile, print=print, max_poll_interval=max_poll_interval, connection_retry_interval=connection_retry_interval ) - def send_job( + def create_job( self, out_format=None, title: str = None, description: str = None, plan: str = None, budget=None, job_options=None, **format_options ) -> RESTJob: @@ -1033,6 +1033,8 @@ def send_job( title=title, description=description, plan=plan, budget=budget, additional=job_options ) + send_job = legacy_alias(create_job, name="send_job") + def execute(self) -> Dict: """Executes the process graph of the imagery. """ newbuilder = self.builder.shallow_copy() diff --git a/openeo/rest/job.py b/openeo/rest/job.py index a6ee9afb2..b7c8a5b86 100644 --- a/openeo/rest/job.py +++ b/openeo/rest/job.py @@ -44,6 +44,7 @@ def _repr_html_(self): def describe_job(self) -> dict: """ Get all job information.""" # GET /jobs/{job_id} + # TODO: rename to just `describe`? 
#280 return self.connection.get("/jobs/{}".format(self.job_id), expected_status=200).json() def update_job(self, process_graph=None, output_format=None, @@ -51,11 +52,13 @@ def update_job(self, process_graph=None, output_format=None, plan=None, budget=None, additional=None): """ Update a job.""" # PATCH /jobs/{job_id} + # TODO: rename to just `update`? #280 raise NotImplementedError def delete_job(self): """ Delete a job.""" # DELETE /jobs/{job_id} + # TODO: rename to just `delete`? #280 self.connection.delete("/jobs/{}".format(self.job_id), expected_status=204) def estimate_job(self): @@ -68,11 +71,14 @@ def estimate_job(self): def start_job(self): """ Start / queue a job for processing.""" # POST /jobs/{job_id}/results + # TODO: rename to just `start`? #280 + # TODO: return self, to allow chained calls self.connection.post("/jobs/{}/results".format(self.job_id), expected_status=202) def stop_job(self): """ Stop / cancel job processing.""" # DELETE /jobs/{job_id}/results + # TODO: rename to just `stop`? #280 self.connection.delete("/jobs/{}/results".format(self.job_id), expected_status=204) @deprecated("Use :py:meth:`~RESTJOB.get_results` instead.", version="0.4.10") diff --git a/openeo/rest/mlmodel.py b/openeo/rest/mlmodel.py index 3f1f9f306..8d99161a8 100644 --- a/openeo/rest/mlmodel.py +++ b/openeo/rest/mlmodel.py @@ -77,17 +77,16 @@ def execute_batch( :param outputfile: The path of a file to which a result can be written :param out_format: (optional) Format of the job result. :param format_options: String Parameters for the job result format - """ # TODO: check/warn about final `save_ml_model` node? 
- job = self.send_job(additional=job_options) + job = self.create_job(additional=job_options) return job.run_synchronous( # TODO #135 support multi file result sets too outputfile=outputfile, print=print, max_poll_interval=max_poll_interval, connection_retry_interval=connection_retry_interval ) - def send_job(self, **kwargs) -> RESTJob: + def create_job(self, **kwargs) -> RESTJob: """ Sends a job to the backend and returns a ClientJob instance. diff --git a/openeo/rest/vectorcube.py b/openeo/rest/vectorcube.py index c0229f294..d8e620b5b 100644 --- a/openeo/rest/vectorcube.py +++ b/openeo/rest/vectorcube.py @@ -122,14 +122,14 @@ def execute_batch( :param format_options: String Parameters for the job result format """ - job = self.send_job(out_format, job_options=job_options, **format_options) + job = self.create_job(out_format, job_options=job_options, **format_options) return job.run_synchronous( # TODO #135 support multi file result sets too outputfile=outputfile, print=print, max_poll_interval=max_poll_interval, connection_retry_interval=connection_retry_interval ) - def send_job(self, out_format=None, job_options=None, **format_options) -> RESTJob: + def create_job(self, out_format=None, job_options=None, **format_options) -> RESTJob: """ Sends a job to the backend and returns a ClientJob instance. 
@@ -143,3 +143,5 @@ def send_job(self, out_format=None, job_options=None, **format_options) -> RESTJ # add `save_result` node shp = shp.save_result(format=out_format, options=format_options) return self._connection.create_job(process_graph=shp.flat_graph(), additional=job_options) + + send_job = legacy_alias(create_job, name="send_job") diff --git a/tests/data/batch_job.json b/tests/data/batch_job.json deleted file mode 100644 index 03f274760..000000000 --- a/tests/data/batch_job.json +++ /dev/null @@ -1,26 +0,0 @@ -{ - "process_graph": { - "loadcollection1": { - "process_id": "load_collection", - "arguments": { - "id": "SENTINEL2_RADIOMETRY_10M", - "spatial_extent": null, - "temporal_extent": null - }, - "result": false - }, - "saveresult1": { - "process_id": "save_result", - "arguments": { - "data": { - "from_node": "loadcollection1" - }, - "options": {}, - "format": "GTIFF" - }, - "result": true - } - }, - "title": "my job", - "description": "just testing" -} \ No newline at end of file diff --git a/tests/rest/datacube/test_datacube100.py b/tests/rest/datacube/test_datacube100.py index 255f88082..fed44a9ac 100644 --- a/tests/rest/datacube/test_datacube100.py +++ b/tests/rest/datacube/test_datacube100.py @@ -7,6 +7,7 @@ import re import sys import textwrap +from typing import Optional import pytest import requests @@ -1535,3 +1536,42 @@ def test_merge_if(con100): "result": True } } + + +class TestBatchJob: + _EXPECTED_SIMPLE_S2_JOB = {"process": {"process_graph": { + "loadcollection1": { + "process_id": "load_collection", + "arguments": {"id": "S2", "spatial_extent": None, "temporal_extent": None} + }, + "saveresult1": { + "process_id": "save_result", + "arguments": {"data": {"from_node": "loadcollection1"}, "format": "GTiff", "options": {}}, + "result": True, + } + }}} + + def _get_handler_post_jobs(self, expected_post_data: Optional[dict] = None, job_id: str = "myj0b1"): + """Create `POST /jobs` handler""" + expected_post_data = expected_post_data or 
self._EXPECTED_SIMPLE_S2_JOB + + def post_jobs(request, context): + assert request.json() == expected_post_data + context.status_code = 201 + context.headers["OpenEO-Identifier"] = job_id + + return post_jobs + + def test_create_job_basic(self, con100, requests_mock): + requests_mock.post(API_URL + "/jobs", json=self._get_handler_post_jobs()) + cube = con100.load_collection("S2") + job = cube.create_job(out_format="GTiff") + assert job.job_id == "myj0b1" + + def test_legacy_send_job(self, con100, requests_mock): + """Legacy `DataCube.send_job` alias for `create_job`""" + requests_mock.post(API_URL + "/jobs", json=self._get_handler_post_jobs()) + cube = con100.load_collection("S2") + with pytest.warns(DeprecationWarning, match="Call to deprecated method `send_job`, use `create_job` instead."): + job = cube.send_job(out_format="GTiff") + assert job.job_id == "myj0b1" diff --git a/tests/rest/datacube/test_mlmodel.py b/tests/rest/datacube/test_mlmodel.py index c00479f80..bfbd5a4c4 100644 --- a/tests/rest/datacube/test_mlmodel.py +++ b/tests/rest/datacube/test_mlmodel.py @@ -73,5 +73,5 @@ def post_jobs(request, context): requests_mock.post(API_URL + "/jobs", json=post_jobs) - job = ml_model.send_job(title="Random forest") + job = ml_model.create_job(title="Random forest") assert job.job_id == "job-rf" diff --git a/tests/test_batch_jobs.py b/tests/test_batch_jobs.py deleted file mode 100644 index 8c6f58cc0..000000000 --- a/tests/test_batch_jobs.py +++ /dev/null @@ -1,33 +0,0 @@ -from unittest import TestCase - -import requests_mock - -import openeo -from openeo.internal.graphbuilder_040 import GraphBuilder -from . 
import load_json_resource - - -@requests_mock.mock() -class TestBatchJobs(TestCase): - - def setUp(self) -> None: - GraphBuilder.id_counter = {} - - def test_create_job(self, m): - m.get("http://localhost:8000/api/", json={"api_version": "0.4.0"}) - m.get("http://localhost:8000/api/collections/SENTINEL2_RADIOMETRY_10M", json={}) - - def match_body(request): - assert request.json() == load_json_resource("data/batch_job.json") - return True - - headers = { - "OpenEO-Identifier": "my-identifier", - "Location": "http://localhost:8000/api/jobs/my-identifier" - } - m.post("http://localhost:8000/api/jobs", status_code=201, headers=headers, additional_matcher=match_body) - - session = openeo.connect("http://localhost:8000/api") - s2_radio = session.imagecollection("SENTINEL2_RADIOMETRY_10M") - job = s2_radio.send_job(out_format="GTIFF", title="my job", description="just testing") - assert job.job_id == "my-identifier"