diff --git a/docs/requirements.txt b/docs/requirements.txt index 8c973ac7a..df0e58bfb 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -1,7 +1,7 @@ # Required by the python script for building documentation poetry==1.2.0b2 -Sphinx>=4,<5 +Sphinx>=5,<8 sphinx-rtd-theme>=1,<2 -sphinx-tabs>=3,<4 -sphinx-toolbox==3.0.0 +sphinx-tabs>=3.4.0,<4 +sphinx-toolbox>=3,<4 git+https://github.com/octue/octue-sdk-python.git@main diff --git a/docs/source/asking_questions.rst b/docs/source/asking_questions.rst index 36db95bc8..a26090e2e 100644 --- a/docs/source/asking_questions.rst +++ b/docs/source/asking_questions.rst @@ -96,17 +96,23 @@ access the event store and run: events = get_events( table_id="your-project.your-dataset.your-table", - sender="octue/test-service:1.0.0", question_uuid="53353901-0b47-44e7-9da3-a3ed59990a71", ) **Options** +- ``question_uuid`` - Retrieve events from this specific question +- ``parent_question_uuid`` - Retrieve events from questions triggered by the same parent question (this doesn't include the parent question's events) +- ``originator_question_uuid`` - Retrieve events for the entire tree of questions triggered by an originator question (a question asked manually through ``Child.ask``; this does include the originator question's events) - ``kind`` - Only retrieve this kind of event if present (e.g. "result") - ``include_backend_metadata`` - If ``True``, retrieve information about the service backend that produced the event - ``limit`` - If set to a positive integer, limit the number of events returned to this number +.. note:: + + Only one of ``question_uuid``, ``parent_question_uuid``, and ``originator_question_uuid`` can be provided at a time. + .. collapse:: See an example output here... [ { "event": { - "datetime": "2024-03-06T15:44:18.156044", - "kind": "delivery_acknowledgement" + "kind": "delivery_acknowledgement", + "datetime": "2024-03-06T15:44:18.156044" }, }, { @@ -149,21 +155,10 @@ access the event store and run: }, { "event": { - "datetime": "2024-03-06T15:46:18.167424", - "kind": "heartbeat" + "kind": "heartbeat", + "datetime": "2024-03-06T15:46:18.167424" }, - "attributes": { - "datetime": "2024-04-11T10:46:48.236064", - "uuid": "a9de11b1-e88f-43fa-b3a4-40a590c3443f", - "order": "7", - "question_uuid": "d45c7e99-d610-413b-8130-dd6eef46dda6", - "originator": "octue/test-service:1.0.0", - "sender": "octue/test-service:1.0.0", - "sender_type": "CHILD", - "sender_sdk_version": "0.51.0", - "recipient": "octue/another-service:3.2.1" - } - } + }, { "event": { "kind": "result", @@ -203,9 +198,9 @@ raised and no answers are returned. {"input_values": {"height": 7, "width": 32}}, ) >>> [ - {"output_values": {"some": "output"}, "output_manifest": None}, - {"output_values": {"another": "result"}, "output_manifest": None}, - {"output_values": {"different": "result"}, "output_manifest": None}, + ({"output_values": {"some": "output"}, "output_manifest": None}, '2681ef4e-4ab7-4cf9-8783-aad982d5e324'), + ({"output_values": {"another": "result"}, "output_manifest": None}, '474923bd-14b6-4f4c-9bfe-8148358f35cd'), + ({"output_values": {"different": "result"}, "output_manifest": None}, '9a50daae-2328-4728-9ddd-b2252474f118'), ] This method uses multithreading, allowing all the questions to be asked at once instead of one after another.
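Since ``Child.ask_multiple`` now returns each answer as an ``(answer, question_uuid)`` 2-tuple (see the example output above), answers can be matched back to the questions that produced them. A minimal consumption sketch, hedged: the input values and variable names are illustrative, and only the tuple shape is taken from the example output above::

    answers = child.ask_multiple(
        {"input_values": {"height": 4, "width": 72}},
        {"input_values": {"height": 7, "width": 32}},
    )

    # Each element pairs an answer with the UUID of the question it came from.
    for answer, question_uuid in answers:
        print(question_uuid, answer["output_values"])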
@@ -213,13 +208,15 @@ This method uses multithreading, allowing all the questions to be asked at once **Options** - If ``raise_errors=False`` is provided, answers are returned for all successful questions while unraised errors are - returned for unsuccessful ones + logged and returned for unsuccessful ones - If ``raise_errors=False`` is provided with ``max_retries > 0``, failed questions are retried up to this number of times - If ``raise_errors=False`` is provided with ``max_retries > 0`` and ``prevent_retries_when`` is set to a list of exception types, failed questions are retried except for those whose exception types are in the list -- The maximum number of threads that can be used to ask questions in parallel can be set via the ``max_workers`` - argument. This has no effect on the total number of questions that can be asked via ``Child.ask_multiple``. +- ``max_workers``: The maximum number of threads used to ask questions in parallel. It has no effect on the total + number of questions that can be asked via ``Child.ask_multiple``. +- ``log_errors``: If ``True`` and ``raise_errors=False``, any errors remaining once retries are exhausted are logged in + addition to being returned Asking a question within a service diff --git a/docs/source/dataset.rst b/docs/source/dataset.rst index 3c849c27d..1ad79450b 100644 --- a/docs/source/dataset.rst +++ b/docs/source/dataset.rst @@ -37,9 +37,14 @@ Working with a dataset is the same whether it's local or cloud-based. from octue.resources import Dataset - dataset = Dataset(path="path/to/dataset", recursive=True) + dataset = Dataset(path="path/to/dataset") - dataset = Dataset(path="gs://my-bucket/path/to/dataset", recursive=True) + dataset = Dataset(path="gs://my-bucket/path/to/dataset") + + .. warning:: + + Datasets include files from all subdirectories by default unless ``recursive=False`` is set, as sketched below. Upload a dataset diff --git a/docs/source/inter_service_compatibility.rst b/docs/source/inter_service_compatibility.rst index 8fb7a5725..ea5183eb1 100644 --- a/docs/source/inter_service_compatibility.rst +++ b/docs/source/inter_service_compatibility.rst @@ -18,80 +18,82 @@ the parent, just that a child is able to accept a question.
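As a brief aside before the compatibility table, the ``Dataset`` change above makes recursion the default; a hedged sketch of opting back out (the paths are placeholders)::

    from octue.resources import Dataset

    # Files in all subdirectories are now included by default.
    dataset = Dataset(path="path/to/dataset")

    # Set recursive=False to include only the top level of the dataset.
    flat_dataset = Dataset(path="path/to/dataset", recursive=False)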
- ``0`` = incompatible - ``1`` = compatible -+--------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+ -| | 0.55.0 | 0.54.0 | 0.53.0 | 0.52.2 | 0.52.1 | 0.52.0 | 0.51.0 | 0.50.1 | 0.50.0 | 0.49.2 | 0.49.1 | 0.49.0 | 0.48.0 | 0.47.2 | 0.47.1 | 0.47.0 | 0.46.3 | 0.46.2 | 0.46.1 | 0.46.0 | 0.45.0 | 0.44.0 | 0.43.7 | 0.43.6 | 0.43.5 | 0.43.4 | 0.43.3 | 0.43.2 | 0.43.1 | 0.43.0 | 0.42.1 | 0.42.0 | 0.41.1 | 0.41.0 | 0.40.2 | 0.40.1 | 0.40.0 | -+========+==========+==========+==========+==========+==========+==========+==========+==========+==========+==========+==========+==========+==========+==========+==========+==========+==========+==========+==========+==========+==========+==========+==========+==========+==========+==========+==========+==========+==========+==========+==========+==========+==========+==========+==========+==========+==========+ -| 0.55.0 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | -+--------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+ -| 0.54.0 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | -+--------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+ -| 0.53.0 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | -+--------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+ -| 0.52.2 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | -+--------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+ -| 0.52.1 | 0 | 0 
| 0 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | -+--------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+ -| 0.52.0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | -+--------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+ -| 0.51.0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | -+--------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+ -| 0.50.1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | -+--------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+ -| 0.50.0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | -+--------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+ -| 0.49.2 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | -+--------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+ -| 0.49.1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 
| 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | -+--------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+ -| 0.49.0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | -+--------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+ -| 0.48.0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | -+--------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+ -| 0.47.2 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | -+--------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+ -| 0.47.1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | -+--------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+ -| 0.47.0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | -+--------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+ -| 0.46.3 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 
-+--------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+ -| 0.46.2 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | -+--------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+ -| 0.46.1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | -+--------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+ -| 0.46.0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | -+--------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+ -| 0.45.0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | -+--------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+ -| 0.44.0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | -+--------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+ -| 0.43.7 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 
-+--------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+ -| 0.43.6 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | -+--------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+ -| 0.43.5 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | -+--------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+ -| 0.43.4 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | -+--------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+ -| 0.43.3 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | -+--------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+ -| 0.43.2 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | -+--------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+ -| 0.43.1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 
-+--------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+ -| 0.43.0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | -+--------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+ -| 0.42.1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | -+--------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+ -| 0.42.0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | -+--------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+ -| 0.41.1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | -+--------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+ -| 0.41.0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | -+--------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+ -| 0.40.2 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 
-+--------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+ -| 0.40.1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | -+--------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+ -| 0.40.0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | -+--------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+ ++--------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+ +| | 0.56.0 | 0.55.0 | 0.54.0 | 0.53.0 | 0.52.2 | 0.52.1 | 0.52.0 | 0.51.0 | 0.50.1 | 0.50.0 | 0.49.2 | 0.49.1 | 0.49.0 | 0.48.0 | 0.47.2 | 0.47.1 | 0.47.0 | 0.46.3 | 0.46.2 | 0.46.1 | 0.46.0 | 0.45.0 | 0.44.0 | 0.43.7 | 0.43.6 | 0.43.5 | 0.43.4 | 0.43.3 | 0.43.2 | 0.43.1 | 0.43.0 | 0.42.1 | 0.42.0 | 0.41.1 | 0.41.0 | 0.40.2 | 0.40.1 | 0.40.0 | ++========+==========+==========+==========+==========+==========+==========+==========+==========+==========+==========+==========+==========+==========+==========+==========+==========+==========+==========+==========+==========+==========+==========+==========+==========+==========+==========+==========+==========+==========+==========+==========+==========+==========+==========+==========+==========+==========+==========+ +| 0.56.0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ++--------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+ +| 0.55.0 | 0 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 
++--------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+ +| 0.54.0 | 0 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ++--------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+ +| 0.53.0 | 0 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ++--------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+ +| 0.52.2 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ++--------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+ +| 0.52.1 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ++--------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+ +| 0.52.0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | ++--------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+ +| 0.51.0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 
| 0 | 0 | ++--------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+ +| 0.50.1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | ++--------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+ +| 0.50.0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | ++--------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+ +| 0.49.2 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | ++--------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+ +| 0.49.1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | ++--------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+ +| 0.49.0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | ++--------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+ +| 0.48.0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 
1 | 1 | 1 | 1 | 1 | ++--------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+ +| 0.47.2 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | ++--------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+ +| 0.47.1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | ++--------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+ +| 0.47.0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | ++--------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+ +| 0.46.3 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | ++--------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+ +| 0.46.2 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | ++--------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+ +| 0.46.1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 
| 1 | 1 | 1 | 1 | 1 | 1 | 1 | ++--------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+ +| 0.46.0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | ++--------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+ +| 0.45.0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | ++--------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+ +| 0.44.0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | ++--------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+ +| 0.43.7 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | ++--------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+ +| 0.43.6 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | ++--------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+ +| 0.43.5 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 
1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | ++--------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+ +| 0.43.4 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | ++--------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+ +| 0.43.3 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | ++--------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+ +| 0.43.2 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | ++--------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+ +| 0.43.1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | ++--------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+ +| 0.43.0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | ++--------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+ +| 0.42.1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 
| 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | ++--------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+ +| 0.42.0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | ++--------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+ +| 0.41.1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | ++--------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+ +| 0.41.0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | ++--------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+ +| 0.40.2 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | ++--------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+ +| 0.40.1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | ++--------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+ +| 0.40.0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 
1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | ++--------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+----------+ diff --git a/docs/source/manifest.rst b/docs/source/manifest.rst index 7a13fc808..21e503479 100644 --- a/docs/source/manifest.rst +++ b/docs/source/manifest.rst @@ -81,6 +81,10 @@ Download all or a subset of datasets from a manifest. "my_dataset": "/path/to/dataset" } +.. note:: + + Datasets are downloaded to temporary directories if no paths are given. + Further information =================== diff --git a/docs/source/testing_services.rst b/docs/source/testing_services.rst index cf3612922..996f089f4 100644 --- a/docs/source/testing_services.rst +++ b/docs/source/testing_services.rst @@ -178,7 +178,7 @@ To emulate your children in tests, patch the :mod:`Child diff --git a/octue/cloud/events/handler.py b/octue/cloud/events/handler.py --- a/octue/cloud/events/handler.py +++ b/octue/cloud/events/handler.py self.skip_missing_events_after: - event = self._skip_to_earliest_waiting_event() + return (None, None) - # Declare there are no more missing events. - self._missing_event_detection_time = None - - if not event: - return - - else: - return - - result = self._handle_event(event) - - if result is not None: - return result - - def _skip_to_earliest_waiting_event(self): - """Get the earliest waiting event and set the event handler up to continue from it. - - :return dict|None: the earliest waiting event if there is one - """ - try: - event = self.waiting_events.pop(self._earliest_waiting_event_number) - except KeyError: - return - - number_of_missing_events = self._earliest_waiting_event_number - self._previous_event_number - 1 - - # Let the event handler know it can handle the next earliest event. - self._previous_event_number = self._earliest_waiting_event_number - 1 - - logger.warning( - "%r: %d consecutive events missing for question %r after %ds - skipping to next earliest waiting event " - "(event %d).", - self.recipient, - number_of_missing_events, - self.question_uuid, - self.skip_missing_events_after, - self._earliest_waiting_event_number, + logger.debug( + "%r: Received an event related to question %r.", + attributes["recipient"], + attributes["question_uuid"], ) - return event + return (event, attributes) - def _handle_event(self, event): - """Pass an event to its handler and update the previous event number. + def _handle_event(self, event, attributes): + """Pass an event to its handler and record it if appropriate. :param dict event: the event to handle + :param dict attributes: the event's attributes :return dict|None: the output of the event (this should be `None` unless the event is a "result" event) """ - self._previous_event_number += 1 - if self.record_events: - self.handled_events.append(event) + self.handled_events.append({"event": event, "attributes": attributes}) if self.only_handle_result and event["kind"] != "result": return handler = self._event_handlers[event["kind"]] - return handler(event) + return handler(event, attributes) - def _handle_delivery_acknowledgement(self, event): + def _handle_delivery_acknowledgement(self, event, attributes): """Log that the question was delivered.
:param dict event: + :param dict attributes: the event's attributes :return None: """ - logger.info("%r's question was delivered at %s.", self.recipient, event["datetime"]) + logger.info("%r's question was delivered at %s.", attributes["recipient"], attributes["datetime"]) - def _handle_heartbeat(self, event): + def _handle_heartbeat(self, event, attributes): """Record the time the heartbeat was received. :param dict event: + :param dict attributes: the event's attributes :return None: """ self._last_heartbeat = datetime.now() + logger.info( "%r: Received a heartbeat from service %r for question %r.", - self.recipient, - self.child_sruid, - self.question_uuid, + attributes["recipient"], + attributes["sender"], + attributes["question_uuid"], ) - def _handle_monitor_message(self, event): + def _handle_monitor_message(self, event, attributes): """Send the monitor message to the handler if one has been provided. :param dict event: + :param dict attributes: the event's attributes :return None: """ logger.debug( "%r: Received a monitor message from service %r for question %r.", - self.recipient, - self.child_sruid, - self.question_uuid, + attributes["recipient"], + attributes["sender"], + attributes["question_uuid"], ) if self.handle_monitor_message is not None: self.handle_monitor_message(event["data"]) - def _handle_log_message(self, event): + def _handle_log_message(self, event, attributes): """Deserialise the event into a log record and pass it to the local log handlers. The child's SRUID and the question UUID are added to the start of the log message, and the SRUIDs of any subchildren called by the child are each coloured differently. :param dict event: + :param dict attributes: the event's attributes :return None: """ record = logging.makeLogRecord(event["log_record"]) - # Add information about the immediate child sending the event and colour it with the first colour in the - # colour palette. - immediate_child_analysis_section = colourise( - f"[{self.child_sruid} | analysis-{self.question_uuid}]", - text_colour=self._log_message_colours[0], - ) - - # Colour any analysis sections from children of the immediate child with the rest of the colour palette. + # Split the log message into its parts. subchild_analysis_sections = [section.strip("[") for section in re.split("] ", record.msg)] final_message = subchild_analysis_sections.pop(-1) - for i in range(len(subchild_analysis_sections)): - subchild_analysis_sections[i] = colourise( - "[" + subchild_analysis_sections[i] + "]", - text_colour=self._log_message_colours[1:][i % len(self._log_message_colours[1:])], + if self.include_service_metadata_in_logs: + # Get information about the immediate child sending the event and colour it with the first colour in the + # colour palette. + immediate_child_analysis_section = colourise( + f"[{attributes['sender']} | {attributes['question_uuid']}]", + text_colour=self._log_message_colours[0], ) - record.msg = " ".join([immediate_child_analysis_section, *subchild_analysis_sections, final_message]) + # Colour any analysis sections from children of the immediate child with the rest of the colour palette. 
+ for i in range(len(subchild_analysis_sections)): + subchild_analysis_sections[i] = colourise( + "[" + subchild_analysis_sections[i] + "]", + text_colour=self._log_message_colours[1:][i % len(self._log_message_colours[1:])], + ) + + record.msg = " ".join([immediate_child_analysis_section, *subchild_analysis_sections, final_message]) + + else: + record.msg = final_message + logger.handle(record) - def _handle_exception(self, event): + def _handle_exception(self, event, attributes): """Raise the exception from the child. :param dict event: + :param dict attributes: the event's attributes :raise Exception: :return None: """ exception_message = "\n\n".join( ( event["exception_message"], - f"The following traceback was captured from the remote service {self.child_sruid!r}:", + f"The following traceback was captured from the remote service {attributes['sender']!r}:", "".join(event["exception_traceback"]), ) ) @@ -357,13 +251,14 @@ def _handle_exception(self, event): raise exception_type(exception_message) - def _handle_result(self, event): + def _handle_result(self, event, attributes): """Extract any output values and output manifest from the result, deserialising the manifest if present. :param dict event: + :param dict attributes: the event's attributes :return dict: """ - logger.info("%r: Received an answer to question %r.", self.recipient, self.question_uuid) + logger.info("%r: Received an answer to question %r.", attributes["recipient"], attributes["question_uuid"]) if event.get("output_manifest"): output_manifest = Manifest.deserialise(event["output_manifest"]) diff --git a/octue/cloud/events/replayer.py b/octue/cloud/events/replayer.py index 203922b46..59560e3fc 100644 --- a/octue/cloud/events/replayer.py +++ b/octue/cloud/events/replayer.py @@ -2,41 +2,48 @@ from octue.cloud.events.handler import AbstractEventHandler from octue.cloud.events.validation import SERVICE_COMMUNICATION_SCHEMA -from octue.cloud.pub_sub.service import Service -from octue.resources.service_backends import ServiceBackend logger = logging.getLogger(__name__) class EventReplayer(AbstractEventHandler): - """A replayer for events retrieved asynchronously from storage. Missing events are immediately skipped. + """A replayer for events retrieved asynchronously from storage. - :param octue.cloud.pub_sub.service.Service recipient: the `Service` instance that's receiving the events :param callable|None handle_monitor_message: a function to handle monitor messages (e.g. send them to an endpoint for plotting or displaying) - this function should take a single JSON-compatible python primitive :param bool record_events: if `True`, record received events in the `received_events` attribute :param dict|None event_handlers: a mapping of event type names to callables that handle each type of event. The handlers must not mutate the events. 
:param dict|str schema: the JSON schema to validate events against + :param bool include_service_metadata_in_logs: if `True`, add the SRUIDs and question UUIDs of the service revisions involved in the question to the start of the log message :param bool only_handle_result: if `True`, skip non-result events and only handle the "result" event if present :return None: """ def __init__( self, - recipient=None, handle_monitor_message=None, record_events=True, event_handlers=None, schema=SERVICE_COMMUNICATION_SCHEMA, + include_service_metadata_in_logs=True, only_handle_result=False, ): + event_handlers = event_handlers or { + "question": self._handle_question, + "delivery_acknowledgement": self._handle_delivery_acknowledgement, + "heartbeat": self._handle_heartbeat, + "monitor_message": self._handle_monitor_message, + "log_record": self._handle_log_message, + "exception": self._handle_exception, + "result": self._handle_result, + } + super().__init__( - recipient or Service(backend=ServiceBackend(), service_id="local/local:local"), handle_monitor_message=handle_monitor_message, record_events=record_events, event_handlers=event_handlers, schema=schema, - skip_missing_events_after=0, + include_service_metadata_in_logs=include_service_metadata_in_logs, only_handle_result=only_handle_result, ) @@ -49,9 +56,16 @@ def handle_events(self, events): super().handle_events() for event in events: - self._extract_and_enqueue_event(event) + event, attributes = self._extract_and_validate_event(event) + + # Skip the event if it fails validation. + if not event: + continue - return self._attempt_to_handle_waiting_events() + result = self._handle_event(event, attributes) + + if result: + return result def _extract_event_and_attributes(self, container): """Extract an event and its attributes from the event container. @@ -59,5 +73,18 @@ def _extract_event_and_attributes(self, container): :param dict container: the container of the event :return (any, dict): the event and its attributes """ - container["attributes"]["order"] = int(container["attributes"]["order"]) return container["event"], container["attributes"] + + def _handle_question(self, event, attributes): + """Log that the question was sent.
+ + :param dict event: + :param dict attributes: the event's attributes + :return None: + """ + logger.info( + "%r asked a question %r to service %r.", + attributes["sender"], + attributes["question_uuid"], + attributes["recipient"], + ) diff --git a/octue/cloud/events/validation.py b/octue/cloud/events/validation.py index 422b42163..41df484db 100644 --- a/octue/cloud/events/validation.py +++ b/octue/cloud/events/validation.py @@ -1,16 +1,27 @@ import logging -import os import jsonschema from octue.compatibility import warn_if_incompatible -VALID_EVENT_KINDS = {"delivery_acknowledgement", "heartbeat", "log_record", "monitor_message", "exception", "result"} +VALID_EVENT_KINDS = { + "question", + "delivery_acknowledgement", + "heartbeat", + "log_record", + "monitor_message", + "exception", + "result", +} -SERVICE_COMMUNICATION_SCHEMA = {"$ref": "https://jsonschema.registry.octue.com/octue/service-communication/0.10.0.json"} +SERVICE_COMMUNICATION_SCHEMA_VERSION = "0.14.1" SERVICE_COMMUNICATION_SCHEMA_INFO_URL = "https://strands.octue.com/octue/service-communication" -SERVICE_COMMUNICATION_SCHEMA_VERSION = os.path.splitext(SERVICE_COMMUNICATION_SCHEMA["$ref"])[0].split("/")[-1] + +SERVICE_COMMUNICATION_SCHEMA = { + "$ref": f"https://jsonschema.registry.octue.com/octue/service-communication/{SERVICE_COMMUNICATION_SCHEMA_VERSION}.json" +} + # Instantiate a JSON schema validator to cache the service communication schema. This avoids downloading it from the # registry every time a message is validated against it. @@ -25,7 +36,7 @@ def is_event_valid(event, attributes, recipient, parent_sdk_version, child_sdk_v :param dict event: the event to validate :param dict attributes: the attributes of the event to validate - :param octue.cloud.pub_sub.service.Service recipient: the service receiving and validating the event + :param str recipient: the SRUID of the service revision receiving and validating the event :param str parent_sdk_version: the semantic version of Octue SDK running on the parent :param str child_sdk_version: the semantic version of Octue SDK running on the child :param dict|None schema: the schema to validate the event and its attributes against; if `None`, this defaults to the service communication schema used in this version of Octue SDK @@ -51,7 +62,7 @@ def raise_if_event_is_invalid(event, attributes, recipient, parent_sdk_version, :param dict event: the event to validate :param dict attributes: the attributes of the event to validate - :param octue.cloud.pub_sub.service.Service recipient: the service receiving and validating the event + :param str recipient: the SRUID of the service revision receiving and validating the event :param str parent_sdk_version: the semantic version of Octue SDK running on the parent :param str child_sdk_version: the semantic version of Octue SDK running on the child :param dict|None schema: the schema to validate the event and its attributes against; if `None`, this defaults to the service communication schema used in this version of Octue SDK diff --git a/octue/cloud/pub_sub/__init__.py b/octue/cloud/pub_sub/__init__.py index b6239757a..30c21aed7 100644 --- a/octue/cloud/pub_sub/__init__.py +++ b/octue/cloud/pub_sub/__init__.py @@ -14,6 +14,7 @@ def create_push_subscription( push_endpoint, subscription_filter=None, expiration_time=None, + allow_existing=True, ): """Create a Google Pub/Sub push subscription for an Octue service for it to receive questions from parents. If a corresponding topic doesn't exist, it will be created first. 
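Tying the replayer above to the event store, a replay of a stored question might look like the sketch below; the table ID is hypothetical, and `handle_events` returns the output of the "result" event if one is present.

    from octue.cloud.events.replayer import EventReplayer
    from octue.cloud.pub_sub.bigquery import get_events

    # Retrieve the stored events for a question (hypothetical table ID).
    events = get_events(
        table_id="your-project.your-dataset.your-table",
        question_uuid="53353901-0b47-44e7-9da3-a3ed59990a71",
    )

    # Replay them locally - log records are re-handled and the result, if any, is returned.
    result = EventReplayer().handle_events(events)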
@@ -23,7 +24,8 @@ def create_push_subscription( :param str push_endpoint: the HTTP/HTTPS endpoint of the service to push to. It should be fully formed and include the 'https://' prefix :param str|None subscription_filter: if specified, the filter to apply to the subscription; otherwise, no filter is applied :param float|None expiration_time: the number of seconds of inactivity after which the subscription should expire. If not provided, no expiration time is applied to the subscription - :return None: + :param bool allow_existing: if True, don't raise an error if the subscription already exists + :return octue.cloud.pub_sub.subscription.Subscription: """ if expiration_time: expiration_time = float(expiration_time) @@ -38,4 +40,5 @@ def create_push_subscription( push_endpoint=push_endpoint, ) - subscription.create() + subscription.create(allow_existing=allow_existing) + return subscription diff --git a/octue/cloud/pub_sub/bigquery.py b/octue/cloud/pub_sub/bigquery.py index a5e26c432..25da8d294 100644 --- a/octue/cloud/pub_sub/bigquery.py +++ b/octue/cloud/pub_sub/bigquery.py @@ -1,99 +1,166 @@ from google.cloud.bigquery import Client, QueryJobConfig, ScalarQueryParameter from octue.cloud.events.validation import VALID_EVENT_KINDS -from octue.exceptions import ServiceNotFound -from octue.resources import Manifest -def get_events(table_id, sender, question_uuid, kind=None, include_backend_metadata=False, limit=1000): - """Get Octue service events for a question from a sender from a Google BigQuery event store. - - :param str table_id: the full ID of the table e.g. "your-project.your-dataset.your-table" - :param str sender: the SRUID of the sender of the events - :param str question_uuid: the UUID of the question to get the events for +DEFAULT_FIELDS = ( + "`originator_question_uuid`", + "`parent_question_uuid`", + "`question_uuid`", + "`kind`", + "`event`", + "`datetime`", + "`uuid`", + "`originator`", + "`parent`", + "`sender`", + "`sender_type`", + "`sender_sdk_version`", + "`recipient`", + "`other_attributes`", +) + +BACKEND_METADATA_FIELDS = ("`backend`", "`backend_metadata`") + + +def get_events( + table_id, + question_uuid=None, + parent_question_uuid=None, + originator_question_uuid=None, + kind=None, + include_backend_metadata=False, + tail=True, + limit=1000, +): + """Get Octue service events for a question from a Google BigQuery event store. Exactly one of the question UUID, + parent question UUID, or originator question UUID must be provided: + + - When a question UUID is specified, only events from that question are retrieved + - When a parent question UUID is specified, events from questions triggered by the same parent question are retrieved, not including the parent question's events + - When an originator question UUID is specified, events for the entire tree of questions triggered by the originator question are retrieved, including the originator question's events + + When the limit is smaller than the total number of events, the default behaviour is to return the "tail" of the + event stream for the question (the most recent n events for the question). + + :param str table_id: the full ID of the Google BigQuery table used as the event store e.g. 
"your-project.your-dataset.your-table" + :param str|None question_uuid: the UUID of a question to get events for + :param str|None parent_question_uuid: the UUID of a parent question to get the sub-question events for + :param str|None originator_question_uuid: the UUID of an originator question get the full tree of events for :param str|None kind: the kind of event to get; if `None`, all event kinds are returned :param bool include_backend_metadata: if `True`, include the service backend metadata + :param bool tail: if `True`, return the most recent events (where a limit applies); e.g. return the most recent 100 log records :param int limit: the maximum number of events to return - :raise ValueError: if the `kind` parameter is invalid - :raise octue.exceptions.ServiceNotFound: if the sender hasn't emitted any events related to the question UUID (or any events at all) - :return list(dict): the events for the question + :return list(dict): the events for the question; this will be empty if there are no events for the question """ - if kind: - if kind not in VALID_EVENT_KINDS: - raise ValueError(f"`kind` must be one of {VALID_EVENT_KINDS!r}; received {kind!r}.") + _validate_inputs(question_uuid, parent_question_uuid, originator_question_uuid, kind) + if question_uuid: + question_uuid_condition = "WHERE question_uuid=@relevant_question_uuid" + elif parent_question_uuid: + question_uuid_condition = "WHERE parent_question_uuid=@relevant_question_uuid" + elif originator_question_uuid: + question_uuid_condition = "WHERE originator_question_uuid=@relevant_question_uuid" + + if kind: event_kind_condition = [f"AND kind={kind!r}"] else: event_kind_condition = [] - client = Client() - - fields = [ - "`event`", - "`kind`", - "`datetime`", - "`uuid`", - "`originator`", - "`sender`", - "`sender_type`", - "`sender_sdk_version`", - "`recipient`", - "`order`", - "`other_attributes`", - ] + # Make a shallow copy of the fields to query. + fields = list(DEFAULT_FIELDS) if include_backend_metadata: - fields.extend(("`backend`", "`backend_metadata`")) + fields.extend(BACKEND_METADATA_FIELDS) - query = "\n".join( + base_query = "\n".join( [ f"SELECT {', '.join(fields)} FROM `{table_id}`", - "WHERE sender=@sender", - "AND question_uuid=@question_uuid", + question_uuid_condition, *event_kind_condition, - "ORDER BY `order`", - "LIMIT @limit", ] ) + if tail: + # Order the inner query to get the most recent events at the top, then reorder the outer query to get the events + # in natural order. + query = "\n".join( + [ + "SELECT * FROM (", + base_query, + "ORDER BY `datetime` DESC", + "LIMIT @limit", + ") ORDER BY `datetime` ASC", + ] + ) + else: + query = "\n".join([base_query, "ORDER BY `datetime` ASC", "LIMIT @limit"]) + + relevant_question_uuid = question_uuid or parent_question_uuid or originator_question_uuid + job_config = QueryJobConfig( query_parameters=[ - ScalarQueryParameter("sender", "STRING", sender), - ScalarQueryParameter("question_uuid", "STRING", question_uuid), + ScalarQueryParameter("relevant_question_uuid", "STRING", relevant_question_uuid), ScalarQueryParameter("limit", "INTEGER", limit), ] ) + client = Client() query_job = client.query(query, job_config=job_config) result = query_job.result() if result.total_rows == 0: - raise ServiceNotFound( - f"No events found. The requested sender {sender!r} may not exist or it hasn't emitted any events for " - f"question {question_uuid!r} (or any events at all)." 
- ) - - df = result.to_dataframe() - df["event"].apply(_deserialise_manifest_if_present) + return [] - events = df.to_dict(orient="records") + events = [_deserialise_event(event) for event in result] return _unflatten_events(events) -def _deserialise_manifest_if_present(event): - """If the event is a "question" or "result" event and a manifest is present, deserialise the manifest and replace - the serialised manifest with it. +def _validate_inputs(question_uuid, parent_question_uuid, originator_question_uuid, kind): + """Check that only one of `question_uuid`, `parent_question_uuid`, or `originator_question_uuid` is provided and + that the `kind` parameter is a valid event kind. - :param dict event: an Octue service event + :param str|None question_uuid: the UUID of a question to get events for + :param str|None parent_question_uuid: the UUID of a parent question to get the sub-question events for + :param str|None originator_question_uuid: the UUID of an originator question to get the full tree of events for + :param str|None kind: the kind of event to get; if `None`, all event kinds are returned + :raise ValueError: if anything other than exactly one of `question_uuid`, `parent_question_uuid`, or `originator_question_uuid` is provided, or if the `kind` parameter is invalid :return None: """ - manifest_keys = {"input_manifest", "output_manifest"} + question_uuid_inputs = (bool(question_uuid), bool(parent_question_uuid), bool(originator_question_uuid)) + + if sum(question_uuid_inputs) != 1: + raise ValueError( + "One and only one of `question_uuid`, `parent_question_uuid`, or `originator_question_uuid` must be " + "provided." + ) + + if kind and kind not in VALID_EVENT_KINDS: + raise ValueError(f"`kind` must be one of {VALID_EVENT_KINDS!r}; received {kind!r}.") + + +def _deserialise_event(event): + """Deserialise an event from the event store: + + - Convert "null" to `None` in the `parent_question_uuid` field + - Convert string-cast booleans and integers to `bool` and `int` types + + :param google.cloud.bigquery.table.Row event: a serialised event from the event store + :return dict: the deserialised event + """ + event = dict(event) + + if event["parent_question_uuid"] == "null": + event["parent_question_uuid"] = None + + # Convert string-cast attributes back to `bool` or `int`. + other_attributes = event["other_attributes"] + other_attributes["retry_count"] = int(other_attributes.get("retry_count")) + + if other_attributes.get("forward_logs"): + other_attributes["forward_logs"] = bool(int(other_attributes.get("forward_logs"))) - for key in manifest_keys: - if key in event: - event[key] = Manifest.deserialise(event[key]) - # Only one of the manifest types will be in the event, so return if one is found.
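To make the conversions in `_deserialise_event` concrete, here is a sketch with invented row values:

    # A row's attributes as stored (everything string-cast)...
    other_attributes = {"retry_count": "0", "forward_logs": "1"}

    # ...and the equivalent conversions applied above:
    assert int(other_attributes["retry_count"]) == 0
    assert bool(int(other_attributes["forward_logs"])) is True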
- return + return event def _unflatten_events(events): @@ -107,14 +174,17 @@ def _unflatten_events(events): event["event"]["kind"] = event.pop("kind") event["attributes"] = { + "originator_question_uuid": event.pop("originator_question_uuid"), + "parent_question_uuid": event.pop("parent_question_uuid"), + "question_uuid": event.pop("question_uuid"), "datetime": event.pop("datetime").isoformat(), "uuid": event.pop("uuid"), "originator": event.pop("originator"), + "parent": event.pop("parent"), "sender": event.pop("sender"), "sender_type": event.pop("sender_type"), "sender_sdk_version": event.pop("sender_sdk_version"), "recipient": event.pop("recipient"), - "order": event.pop("order"), **event.pop("other_attributes"), } diff --git a/octue/cloud/pub_sub/events.py b/octue/cloud/pub_sub/events.py index 9d0b700b8..f9495b9a3 100644 --- a/octue/cloud/pub_sub/events.py +++ b/octue/cloud/pub_sub/events.py @@ -29,9 +29,17 @@ def extract_event_and_attributes_from_pub_sub_message(message): # Cast attributes to a dictionary to avoid defaultdict-like behaviour from Pub/Sub message attributes container. attributes = dict(getattr_or_subscribe(message, "attributes")) - # Deserialise the `order` and `forward_logs` fields if they're present (don't assume they are before validation). + # Deserialise the `parent_question_uuid`, `forward_logs`, and `retry_count` fields if they're present + # (don't assume they are before validation). + if attributes.get("parent_question_uuid") == "null": + attributes["parent_question_uuid"] = None + + retry_count = attributes.get("retry_count") + + if retry_count: + attributes["retry_count"] = int(retry_count) + else: + attributes["retry_count"] = None # Required for question events. if attributes.get("sender_type") == "PARENT": @@ -56,34 +64,31 @@ class GoogleCloudPubSubEventHandler(AbstractEventHandler): """A synchronous handler for events received as Google Pub/Sub messages from a pull subscription. :param octue.cloud.pub_sub.subscription.Subscription subscription: the subscription messages are pulled from - :param octue.cloud.pub_sub.service.Service recipient: the `Service` instance that's receiving the events :param callable|None handle_monitor_message: a function to handle monitor messages (e.g. send them to an endpoint for plotting or displaying) - this function should take a single JSON-compatible python primitive :param bool record_events: if `True`, record received events in the `received_events` attribute :param dict|None event_handlers: a mapping of event type names to callables that handle each type of event. The handlers must not mutate the events.
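After unflattening, each stored event comes back in the same `{"event": ..., "attributes": ...}` shape produced on the Pub/Sub path. An illustrative example with invented values:

    {
        "event": {"kind": "heartbeat"},
        "attributes": {
            "originator_question_uuid": "41d7a1a6-a4a7-4ba4-9a8e-2b3f0a5d4c21",
            "parent_question_uuid": None,
            "question_uuid": "41d7a1a6-a4a7-4ba4-9a8e-2b3f0a5d4c21",
            "datetime": "2024-03-06T15:46:18.167424",
            "uuid": "a9de11b1-e88f-43fa-b3a4-40a590c3443f",
            "originator": "octue/test-service:1.0.0",
            "parent": "octue/test-service:1.0.0",
            "sender": "octue/test-service:1.0.0",
            "sender_type": "CHILD",
            "sender_sdk_version": "0.51.0",
            "recipient": "octue/another-service:3.2.1",
            "retry_count": 0,
        },
    }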
:param dict|str schema: the JSON schema to validate events against - :param int|float skip_missing_events_after: the number of seconds after which to skip any events if they haven't arrived but subsequent events have + :param bool include_service_metadata_in_logs: if `True`, add the SRUIDs and question UUIDs of the service revisions involved in the question to the start of the log message :return None: """ def __init__( self, subscription, - recipient, handle_monitor_message=None, record_events=True, event_handlers=None, schema=SERVICE_COMMUNICATION_SCHEMA, - skip_missing_events_after=10, + include_service_metadata_in_logs=True, ): self.subscription = subscription super().__init__( - recipient, handle_monitor_message=handle_monitor_message, record_events=record_events, event_handlers=event_handlers, schema=schema, - skip_missing_events_after=skip_missing_events_after, + include_service_metadata_in_logs=include_service_metadata_in_logs, ) self._subscriber = SubscriberClient() @@ -137,18 +142,33 @@ def handle_events(self, timeout=60, maximum_heartbeat_interval=300): while self._alive: pull_timeout = self._check_timeout_and_get_pull_timeout(timeout) - self._pull_and_enqueue_available_events(timeout=pull_timeout) - result = self._attempt_to_handle_waiting_events() + events = self._pull_available_events(timeout=pull_timeout) + + for event, attributes in events: + # Skip the event if it fails validation. + if not event: + continue + + result = self._handle_event(event, attributes) - if result is not None: - return result + if result: + return result finally: self._heartbeat_checker.cancel() self._subscriber.close() + if self.handled_events: + last_event = self.handled_events[-1] + sender = last_event["attributes"]["sender"] + question_uuid = last_event["attributes"]["question_uuid"] + else: + sender = "UNKNOWN" + question_uuid = "UNKNOWN" + raise TimeoutError( - f"No heartbeat has been received within the maximum allowed interval of {maximum_heartbeat_interval}s." + f"No heartbeat has been received from {sender!r} for question {question_uuid} within the maximum allowed " + f"interval of {maximum_heartbeat_interval}s." ) def _monitor_heartbeat(self, maximum_heartbeat_interval): @@ -187,13 +207,13 @@ def _check_timeout_and_get_pull_timeout(self, timeout): return timeout - total_run_time - def _pull_and_enqueue_available_events(self, timeout): - """Pull as many events from the subscription as are available and enqueue them in `self.waiting_events`, - raising a `TimeoutError` if the timeout is exceeded before succeeding. + def _pull_available_events(self, timeout): + """Pull as many events from the subscription as are available and return them, raising a `TimeoutError` if the + timeout is exceeded before succeeding.
:param float|None timeout: how long to wait for the event [s] before raising a `TimeoutError` :raise TimeoutError|concurrent.futures.TimeoutError: if the timeout is exceeded - :return None: + :return list((dict, dict)|(None, None)): a list of event-attributes pairs if the events are valid or `(None, None)` if they're invalid """ pull_start_time = time.perf_counter() attempt = 1 @@ -218,7 +238,7 @@ raise TimeoutError(f"No message received from {self.subscription.topic!r} after {timeout} seconds.") if not pull_response.received_messages: - return + return [] self._subscriber.acknowledge( request={ @@ -227,8 +247,7 @@ } ) - for event in pull_response.received_messages: - self._extract_and_enqueue_event(event) + return [self._extract_and_validate_event(event) for event in pull_response.received_messages] def _extract_event_and_attributes(self, container): """Extract an event and its attributes from a Pub/Sub message. diff --git a/octue/cloud/pub_sub/logging.py b/octue/cloud/pub_sub/logging.py index e8d78a6a8..bacfdff84 100644 --- a/octue/cloud/pub_sub/logging.py +++ b/octue/cloud/pub_sub/logging.py @@ -10,19 +10,38 @@ class GoogleCloudPubSubHandler(logging.Handler): :param callable event_emitter: the `_emit_event` method of the service that instantiated this instance :param str question_uuid: the UUID of the question to handle log records for - :param str originator: the SRUID of the service that asked the question these log records are related to + :param str|None parent_question_uuid: the UUID of the question that triggered the question these log records are related to + :param str|None originator_question_uuid: the UUID of the question that triggered all ancestor questions of this question + :param str parent: the SRUID of the parent that asked the question these log records are related to + :param str originator: the SRUID of the service revision that triggered the tree of questions these log records are related to :param str recipient: the SRUID of the service to send these log records to - :param octue.cloud.events.counter.EventCounter order: an event counter keeping track of the order of emitted events + :param int retry_count: the retry count of the question (this is zero if it's the first attempt at the question) :param float timeout: timeout in seconds for attempting to publish each log record :return None: """ - def __init__(self, event_emitter, question_uuid, originator, recipient, order, timeout=60, *args, **kwargs): + def __init__( + self, + event_emitter, + question_uuid, + parent_question_uuid, + originator_question_uuid, + parent, + originator, + recipient, + retry_count, + timeout=60, + *args, + **kwargs ): super().__init__(*args, **kwargs) self.question_uuid = question_uuid + self.parent_question_uuid = parent_question_uuid + self.originator_question_uuid = originator_question_uuid + self.parent = parent self.originator = originator self.recipient = recipient - self.order = order + self.retry_count = retry_count self.timeout = timeout self._emit_event = event_emitter @@ -38,13 +57,15 @@ def emit(self, record): "kind": "log_record", "log_record": self._convert_log_record_to_primitives(record), },
- }, + retry_count=self.retry_count, + question_uuid=self.question_uuid, + parent_question_uuid=self.parent_question_uuid, + originator_question_uuid=self.originator_question_uuid, + # The sender type is repeated here as a string to avoid a circular import. + attributes={"sender_type": "CHILD"}, ) except Exception: # noqa diff --git a/octue/cloud/pub_sub/service.py b/octue/cloud/pub_sub/service.py index 69768edf4..8e7175a99 100644 --- a/octue/cloud/pub_sub/service.py +++ b/octue/cloud/pub_sub/service.py @@ -5,7 +5,6 @@ import importlib.metadata import json import logging -import threading import uuid import google.api_core.exceptions @@ -15,7 +14,6 @@ import octue.exceptions from octue.cloud.events import OCTUE_SERVICES_PREFIX -from octue.cloud.events.counter import EventCounter from octue.cloud.events.validation import raise_if_event_is_invalid from octue.cloud.pub_sub import Subscription, Topic from octue.cloud.pub_sub.events import GoogleCloudPubSubEventHandler, extract_event_and_attributes_from_pub_sub_message @@ -37,10 +35,6 @@ logger = logging.getLogger(__name__) -# A lock to ensure only one event can be emitted at a time so that the order is incremented correctly when events are -# being emitted on multiple threads (e.g. via the main thread and a periodic monitor message thread). This avoids 1) -# events overwriting each other in the parent's message handler and 2) events losing their order. -emit_event_lock = threading.Lock() DEFAULT_NAMESPACE = "default" ANSWERS_NAMESPACE = "answers" @@ -72,7 +66,7 @@ class Service: :return None: """ - def __init__(self, backend, service_id=None, run_function=None, name=None, service_registries=None): + def __init__(self, backend, service_id=None, run_function=None, service_registries=None): if service_id is None: self.id = create_sruid(namespace=DEFAULT_NAMESPACE, name=str(uuid.uuid4())) @@ -86,7 +80,6 @@ def __init__(self, backend, service_id=None, run_function=None, name=None, servi self.backend = backend self.run_function = run_function - self.name = name self.service_registries = service_registries self._pub_sub_id = convert_service_id_to_pub_sub_form(self.id) @@ -100,7 +93,7 @@ def __repr__(self): :return str: the service represented as a string """ - return f"<{type(self).__name__}({self.name or self.id!r})>" + return f"<{type(self).__name__}({self.id!r})>" @property def publisher(self): @@ -111,7 +104,10 @@ def publisher(self): :return google.cloud.pubsub_v1.PublisherClient: """ if not self._publisher: - self._publisher = pubsub_v1.PublisherClient(batch_settings=BATCH_SETTINGS) + self._publisher = pubsub_v1.PublisherClient( + batch_settings=BATCH_SETTINGS, + publisher_options=pubsub_v1.types.PublisherOptions(enable_message_ordering=True), + ) return self._publisher @@ -203,40 +199,50 @@ def serve(self, timeout=None, delete_topic_and_subscription_on_exit=False, allow return future, subscriber - def answer(self, question, order=None, heartbeat_interval=120, timeout=30): + def answer(self, question, heartbeat_interval=120, timeout=30): """Answer a question from a parent - i.e. run the child's app on the given data and return the output values. Answers conform to the output values and output manifest schemas specified in the child's Twine file. 
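Message ordering replaces the old `order` counter: Pub/Sub itself now guarantees per-key delivery order. A standalone sketch of the underlying client behaviour, with hypothetical project and topic names:

    from google.cloud import pubsub_v1

    # Ordering must be enabled on the publisher, as in the change above.
    publisher = pubsub_v1.PublisherClient(
        publisher_options=pubsub_v1.types.PublisherOptions(enable_message_ordering=True),
    )

    topic = publisher.topic_path("my-project", "my-topic")  # Hypothetical names.

    # Messages sharing an ordering key are delivered in publish order, so using the
    # question UUID as the key keeps each question's event stream in order.
    publisher.publish(topic, data=b'{"kind": "heartbeat"}', ordering_key="<question-uuid>")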
:param dict|google.cloud.pubsub_v1.subscriber.message.Message question: - :param octue.cloud.events.counter.EventCounter|None order: an event counter keeping track of the order of emitted events :param int|float heartbeat_interval: the time interval, in seconds, at which to send heartbeats :param float|None timeout: time in seconds to keep retrying sending of the answer once it has been calculated :raise Exception: if any exception arises during running analysis and sending its results :return None: """ - order = order or EventCounter() - try: ( question, question_uuid, + parent_question_uuid, + originator_question_uuid, forward_logs, parent_sdk_version, save_diagnostics, + parent, originator, + retry_count, ) = self._parse_question(question) except jsonschema.ValidationError: return heartbeater = None + routing_metadata = { + "question_uuid": question_uuid, + "parent_question_uuid": parent_question_uuid, + "originator_question_uuid": originator_question_uuid, + "parent": parent, + "originator": originator, + "retry_count": retry_count, + } + try: - self._send_delivery_acknowledgment(question_uuid, originator, order) + self._send_delivery_acknowledgment(**routing_metadata) heartbeater = RepeatingTimer( interval=heartbeat_interval, function=self._send_heartbeat, - kwargs={"question_uuid": question_uuid, "originator": originator, "order": order}, + kwargs=routing_metadata, ) heartbeater.daemon = True @@ -245,27 +251,24 @@ def answer(self, question, order=None, heartbeat_interval=120, timeout=30): if forward_logs: analysis_log_handler = GoogleCloudPubSubHandler( event_emitter=self._emit_event, - question_uuid=question_uuid, - originator=originator, - recipient=originator, - order=order, + recipient=parent, + **routing_metadata, ) else: analysis_log_handler = None + handle_monitor_message = functools.partial(self._send_monitor_message, **routing_metadata) + analysis = self.run_function( analysis_id=question_uuid, input_values=question.get("input_values"), input_manifest=question.get("input_manifest"), children=question.get("children"), analysis_log_handler=analysis_log_handler, - handle_monitor_message=functools.partial( - self._send_monitor_message, - question_uuid=question_uuid, - originator=originator, - order=order, - ), + handle_monitor_message=handle_monitor_message, save_diagnostics=save_diagnostics, + originator_question_uuid=originator_question_uuid, + originator=originator, ) result = make_minimal_dictionary(kind="result", output_values=analysis.output_values) @@ -275,11 +278,10 @@ def answer(self, question, order=None, heartbeat_interval=120, timeout=30): self._emit_event( event=result, - originator=originator, - recipient=originator, - order=order, - attributes={"question_uuid": question_uuid, "sender_type": CHILD_SENDER_TYPE}, + recipient=parent, + attributes={"sender_type": CHILD_SENDER_TYPE}, timeout=timeout, + **routing_metadata, ) heartbeater.cancel() @@ -290,7 +292,7 @@ def answer(self, question, order=None, heartbeat_interval=120, timeout=30): heartbeater.cancel() warn_if_incompatible(child_sdk_version=self._local_sdk_version, parent_sdk_version=parent_sdk_version) - self.send_exception(question_uuid, originator, order, timeout=timeout) + self.send_exception(timeout=timeout, **routing_metadata) raise error def ask( @@ -303,8 +305,12 @@ def ask( allow_local_files=False, save_diagnostics="SAVE_DIAGNOSTICS_ON_CRASH", # This is repeated as a string here to avoid a circular import. 
question_uuid=None, + parent_question_uuid=None, + originator_question_uuid=None, + originator=None, push_endpoint=None, asynchronous=False, + retry_count=0, timeout=86400, ): """Ask a child a question (i.e. send it input values for it to analyse and produce output values for) and return @@ -318,9 +324,13 @@ :param bool subscribe_to_logs: if `True`, subscribe to the child's logs and handle them with the local log handlers :param bool allow_local_files: if `True`, allow the input manifest to contain references to local files - this should only be set to `True` if the child will be able to access these local files :param str save_diagnostics: must be one of {"SAVE_DIAGNOSTICS_OFF", "SAVE_DIAGNOSTICS_ON_CRASH", "SAVE_DIAGNOSTICS_ON"}; if turned on, allow the input values and manifest (and its datasets) to be saved by the child either all the time or just if it fails while processing them - :param str|None question_uuid: the UUID to use for the question if a specific one is needed; a UUID is generated if not + :param str|None question_uuid: the UUID to use for the question; if `None`, a UUID is generated + :param str|None parent_question_uuid: the UUID of the question that triggered this question; this should be `None` if this is the first question in a question tree + :param str|None originator_question_uuid: the UUID of the question that triggered all ancestor questions of this question; if `None`, this question is assumed to be the originator question + :param str|None originator: the SRUID of the service revision that triggered all ancestor questions of this question; if `None`, this service revision is assumed to be the originator :param str|None push_endpoint: if answers to the question should be pushed to an endpoint, provide its URL here (the returned subscription will be a push subscription); if not, leave this as `None` :param bool asynchronous: if `True` and not using a push endpoint, don't create an answer subscription + :param int retry_count: the retry count of the question (this is zero if it's the first attempt at the question) :param float|None timeout: time in seconds to keep retrying sending the question :return (octue.cloud.pub_sub.subscription.Subscription|None, str): the answer subscription (if the question is synchronous or a push endpoint was used) and question UUID """ @@ -351,6 +361,12 @@ question_uuid = question_uuid or str(uuid.uuid4()) + # If the originator question UUID isn't provided, assume that this question is the originator question. + originator_question_uuid = originator_question_uuid or question_uuid + + # If the originator isn't provided, assume that this service revision is the originator.
+ originator = originator or self.id + if asynchronous and not push_endpoint: answer_subscription = None else: @@ -366,7 +382,13 @@ def ask( ), push_endpoint=push_endpoint, ) - answer_subscription.create(allow_existing=False) + + if retry_count > 0: + allow_existing = True + else: + allow_existing = False + + answer_subscription.create(allow_existing=allow_existing) self._send_question( input_values=input_values, @@ -375,7 +397,11 @@ def ask( forward_logs=subscribe_to_logs, save_diagnostics=save_diagnostics, question_uuid=question_uuid, + parent_question_uuid=parent_question_uuid, + originator_question_uuid=originator_question_uuid, + originator=originator, recipient=service_id, + retry_count=retry_count, ) return answer_subscription, question_uuid @@ -395,7 +421,6 @@ def wait_for_answer( :param callable|None handle_monitor_message: a function to handle monitor messages (e.g. send them to an endpoint for plotting or displaying) - this function should take a single JSON-compatible python primitive as an argument (note that this could be an array or object) :param bool record_events: if `True`, record messages received from the child in the `received_events` attribute :param float|None timeout: how long in seconds to wait for an answer before raising a `TimeoutError` - :param float|int delivery_acknowledgement_timeout: how long in seconds to wait for a delivery acknowledgement before aborting :param float|int maximum_heartbeat_interval: the maximum amount of time (in seconds) allowed between child heartbeats before an error is raised :raise TimeoutError: if the timeout is exceeded :return dict: dictionary containing the keys "output_values" and "output_manifest" @@ -408,7 +433,6 @@ def wait_for_answer( self._event_handler = GoogleCloudPubSubEventHandler( subscription=subscription, - recipient=self, handle_monitor_message=handle_monitor_message, record_events=record_events, ) @@ -422,12 +446,24 @@ def wait_for_answer( finally: subscription.delete() - def send_exception(self, question_uuid, originator, order, timeout=30): + def send_exception( + self, + question_uuid, + parent_question_uuid, + originator_question_uuid, + parent, + originator, + retry_count, + timeout=30, + ): """Serialise and send the exception being handled to the parent. 
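Reading the defaults above together with subscription creation: a first ask makes the question its own originator and requires a fresh answer subscription, while a retry with the same question UUID may reuse it. A hedged usage sketch; `parent` is a hypothetical Service instance and the service IDs and input values are invented:

    # First attempt - the answer subscription must not already exist (retry_count == 0).
    subscription, question_uuid = parent.ask(
        service_id="octue/child-service:1.0.0",
        input_values={"height": 7},
    )

    # Retry the same question: reusing the UUID with retry_count > 0 means the
    # existing answer subscription is allowed to exist rather than raising an error.
    parent.ask(
        service_id="octue/child-service:1.0.0",
        input_values={"height": 7},
        question_uuid=question_uuid,
        retry_count=1,
    )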
:param str question_uuid: the UUID of the question this event relates to - :param str originator: the SRUID of the service that asked the question this event is related to - :param octue.cloud.events.counter.EventCounter order: an event counter keeping track of the order of emitted events + :param str|None parent_question_uuid: the UUID of the question that triggered this question + :param str|None originator_question_uuid: the UUID of the question that triggered all ancestor questions of this question + :param str parent: the SRUID of the parent that asked the question this event is related to + :param str originator: the SRUID of the service revision that triggered all ancestor questions of this question + :param int retry_count: the retry count of the question (this is zero if it's the first attempt at the question) :param float|None timeout: time in seconds to keep retrying sending of the exception :return None: """ @@ -441,53 +477,94 @@ def send_exception(self, question_uuid, originator, order, timeout=30): "exception_message": exception_message, "exception_traceback": exception["traceback"], }, + question_uuid=question_uuid, + parent_question_uuid=parent_question_uuid, + originator_question_uuid=originator_question_uuid, + parent=parent, originator=originator, - recipient=originator, - order=order, - attributes={"question_uuid": question_uuid, "sender_type": CHILD_SENDER_TYPE}, + recipient=parent, + retry_count=retry_count, + attributes={"sender_type": CHILD_SENDER_TYPE}, timeout=timeout, ) - def _emit_event(self, event, originator, recipient, order, attributes=None, timeout=30): - """Emit a JSON-serialised event as a Pub/Sub message to the services topic with optional message attributes, - incrementing the `order` argument by one. This method is thread-safe. + def _emit_event( + self, + event, + question_uuid, + parent_question_uuid, + originator_question_uuid, + parent, + originator, + recipient, + retry_count, + attributes=None, + timeout=30, + ): + """Emit a JSON-serialised event as a Pub/Sub message to the services topic with optional message attributes. 
+ Extra attributes can be added to an event via the `attributes` argument but the following attributes are always + included: + - `uuid` (event UUID) + - `question_uuid` + - `parent_question_uuid` + - `originator_question_uuid` + - `parent` + - `originator` + - `sender` + - `sender_sdk_version` + - `recipient` + - `retry_count` + - `datetime` :param dict event: JSON-serialisable data to emit as an event - :param str originator: the SRUID of the service that asked the question this event is related to + :param str question_uuid: + :param str|None parent_question_uuid: the UUID of the question that triggered this question + :param str|None originator_question_uuid: the UUID of the question that triggered all ancestor questions of this question + :param str parent: the SRUID of the parent that asked the question this event is related to + :param str originator: the SRUID of the service revision that triggered all ancestor questions of this question :param str recipient: the SRUID of the service the event is intended for - :param octue.cloud.events.counter.EventCounter order: an event counter keeping track of the order of emitted events + :param int retry_count: the retry count of the question (this is zero if it's the first attempt at the question) :param dict|None attributes: key-value pairs to attach to the event - the values must be strings or bytes :param int|float timeout: the timeout for sending the event in seconds :return google.cloud.pubsub_v1.publisher.futures.Future: """ attributes = attributes or {} - attributes["uuid"] = str(uuid.uuid4()) - attributes["originator"] = originator - attributes["sender"] = self.id - attributes["sender_sdk_version"] = self._local_sdk_version - attributes["recipient"] = recipient - - with emit_event_lock: - attributes["order"] = int(order) - attributes["datetime"] = datetime.datetime.utcnow().isoformat() - converted_attributes = {} - - for key, value in attributes.items(): - if isinstance(value, bool): - value = str(int(value)) - elif isinstance(value, (int, float)): - value = str(value) - - converted_attributes[key] = value - - future = self.publisher.publish( - topic=self.services_topic.path, - data=json.dumps(event, cls=OctueJSONEncoder).encode(), - retry=retry.Retry(deadline=timeout), - **converted_attributes, - ) - order += 1 + attributes.update( + { + "uuid": str(uuid.uuid4()), + "datetime": datetime.datetime.utcnow().isoformat(), + "question_uuid": question_uuid, + "parent_question_uuid": parent_question_uuid, + "originator_question_uuid": originator_question_uuid, + "parent": parent, + "originator": originator, + "sender": self.id, + "sender_sdk_version": self._local_sdk_version, + "recipient": recipient, + "retry_count": retry_count, + } + ) + + converted_attributes = {} + + for key, value in attributes.items(): + if isinstance(value, bool): + value = str(int(value)) + elif isinstance(value, (int, float)): + value = str(value) + elif value is None: + value = json.dumps(value) + + converted_attributes[key] = value + + future = self.publisher.publish( + topic=self.services_topic.path, + data=json.dumps(event, cls=OctueJSONEncoder).encode(), + ordering_key=question_uuid, + retry=retry.Retry(deadline=timeout), + **converted_attributes, + ) return future @@ -499,7 +576,11 @@ def _send_question( forward_logs, save_diagnostics, question_uuid, + parent_question_uuid, + originator_question_uuid, + originator, recipient, + retry_count, timeout=30, ): """Send a question to a child service. 
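Pub/Sub message attributes must be strings, hence the casting loop above. A self-contained sketch of the conversions it applies:

    import json

    def convert(value):
        # Mirrors the conversion loop in `_emit_event` above.
        if isinstance(value, bool):
            return str(int(value))  # True -> "1", False -> "0".
        if isinstance(value, (int, float)):
            return str(value)  # e.g. 0 -> "0".
        if value is None:
            return json.dumps(value)  # None -> "null".
        return value

    assert convert(True) == "1"
    assert convert(0) == "0"
    assert convert(None) == "null"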
@@ -510,7 +591,11 @@ def _send_question( :param bool forward_logs: whether to request the child to forward its logs :param str save_diagnostics: must be one of {"SAVE_DIAGNOSTICS_OFF", "SAVE_DIAGNOSTICS_ON_CRASH", "SAVE_DIAGNOSTICS_ON"}; if turned on, allow the input values and manifest (and its datasets) to be saved by the child either all the time or just if it fails while processing them :param str question_uuid: the UUID of the question being sent + :param str|None parent_question_uuid: the UUID of the question that triggered this question + :param str|None originator_question_uuid: the UUID of the question that triggered all ancestor questions of this question + :param str originator: the SRUID of the service revision that triggered all ancestor questions of this question :param str recipient: the SRUID of the child the question is intended for + :param int retry_count: the retry count of the question (this is zero if it's the first attempt at the question) :param float timeout: time in seconds after which to give up sending :return None: """ @@ -522,85 +607,131 @@ def _send_question( future = self._emit_event( event=question, - timeout=timeout, - originator=self.id, + question_uuid=question_uuid, + parent_question_uuid=parent_question_uuid, + originator_question_uuid=originator_question_uuid, + parent=self.id, + originator=originator, recipient=recipient, - order=EventCounter(), + retry_count=retry_count, attributes={ - "question_uuid": question_uuid, "forward_logs": forward_logs, "save_diagnostics": save_diagnostics, "sender_type": PARENT_SENDER_TYPE, }, + timeout=timeout, ) # Await successful publishing of the question. future.result() logger.info("%r asked a question %r to service %r.", self, question_uuid, recipient) - def _send_delivery_acknowledgment(self, question_uuid, originator, order, timeout=30): + def _send_delivery_acknowledgment( + self, + question_uuid, + parent_question_uuid, + originator_question_uuid, + parent, + originator, + retry_count, + timeout=30, + ): """Send an acknowledgement of question receipt to the parent. 
:param str question_uuid: the UUID of the question this event relates to - :param str originator: the SRUID of the service that asked the question this event is related to - :param octue.cloud.events.counter.EventCounter order: an event counter keeping track of the order of emitted events + :param str|None parent_question_uuid: the UUID of the question that triggered this question + :param str|None originator_question_uuid: the UUID of the question that triggered all ancestor questions of this question + :param str parent: the SRUID of the service that asked the question this event is related to + :param str originator: the SRUID of the service revision that triggered all ancestor questions of this question + :param int retry_count: the retry count of the question (this is zero if it's the first attempt at the question) :param float timeout: time in seconds after which to give up sending :return None: """ self._emit_event( - { - "kind": "delivery_acknowledgement", - "datetime": datetime.datetime.utcnow().isoformat(), - }, + {"kind": "delivery_acknowledgement"}, + question_uuid=question_uuid, + parent_question_uuid=parent_question_uuid, + originator_question_uuid=originator_question_uuid, timeout=timeout, + parent=parent, originator=originator, - recipient=originator, - order=order, - attributes={"question_uuid": question_uuid, "sender_type": CHILD_SENDER_TYPE}, + recipient=parent, + retry_count=retry_count, + attributes={"sender_type": CHILD_SENDER_TYPE}, ) logger.info("%r acknowledged receipt of question %r.", self, question_uuid) - def _send_heartbeat(self, question_uuid, originator, order, timeout=30): + def _send_heartbeat( + self, + question_uuid, + parent_question_uuid, + originator_question_uuid, + parent, + originator, + retry_count, + timeout=30, + ): """Send a heartbeat to the parent, indicating that the service is alive. 
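On the wire, a heartbeat event is now just its kind; all routing metadata travels in the attributes. An illustrative emitted pair with invented values, shown before string-casting:

    event = {"kind": "heartbeat"}

    attributes = {
        "question_uuid": "9cf07a34-9e1e-4a58-9dbc-7b2a5d6c4e1f",
        "parent_question_uuid": None,
        "originator_question_uuid": "9cf07a34-9e1e-4a58-9dbc-7b2a5d6c4e1f",
        "parent": "octue/parent-service:1.0.0",
        "originator": "octue/parent-service:1.0.0",
        "sender": "octue/child-service:1.0.0",
        "sender_type": "CHILD",
        "recipient": "octue/parent-service:1.0.0",
        "retry_count": 0,
    }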
:param str question_uuid: the UUID of the question this event relates to - :param str originator: the SRUID of the service that asked the question this event is related to - :param octue.cloud.events.counter.EventCounter order: an event counter keeping track of the order of emitted events + :param str|None parent_question_uuid: the UUID of the question that triggered this question + :param str|None originator_question_uuid: the UUID of the question that triggered all ancestor questions of this question + :param str parent: the SRUID of the parent that asked the question this event is related to + :param str originator: the SRUID of the service revision that triggered all ancestor questions of this question + :param int retry_count: the retry count of the question (this is zero if it's the first attempt at the question) :param float timeout: time in seconds after which to give up sending :return None: """ self._emit_event( - { - "kind": "heartbeat", - "datetime": datetime.datetime.utcnow().isoformat(), - }, + {"kind": "heartbeat"}, + question_uuid=question_uuid, + parent_question_uuid=parent_question_uuid, + originator_question_uuid=originator_question_uuid, + parent=parent, originator=originator, - recipient=originator, - order=order, + recipient=parent, + retry_count=retry_count, + attributes={"sender_type": CHILD_SENDER_TYPE}, timeout=timeout, - attributes={"question_uuid": question_uuid, "sender_type": CHILD_SENDER_TYPE}, ) logger.debug("Heartbeat sent by %r.", self) - def _send_monitor_message(self, data, question_uuid, originator, order, timeout=30): + def _send_monitor_message( + self, + data, + question_uuid, + parent_question_uuid, + originator_question_uuid, + parent, + originator, + retry_count, + timeout=30, + ): """Send a monitor message to the parent. :param any data: the data to send as a monitor message :param str question_uuid: the UUID of the question this event relates to - :param str originator: the SRUID of the service that asked the question this event is related to - :param octue.cloud.events.counter.EventCounter order: an event counter keeping track of the order of emitted events + :param str|None parent_question_uuid: the UUID of the question that triggered this question + :param str|None originator_question_uuid: the UUID of the question that triggered all ancestor questions of this question + :param str parent: the SRUID of the service that asked the question this event is related to + :param str originator: the SRUID of the service revision that triggered all ancestor questions of this question + :param int retry_count: the retry count of the question (this is zero if it's the first attempt at the question) :param float timeout: time in seconds to retry sending the message :return None: """ self._emit_event( {"kind": "monitor_message", "data": data}, + question_uuid=question_uuid, + parent_question_uuid=parent_question_uuid, + originator_question_uuid=originator_question_uuid, + parent=parent, originator=originator, - recipient=originator, - order=order, + recipient=parent, + retry_count=retry_count, timeout=timeout, - attributes={"question_uuid": question_uuid, "sender_type": CHILD_SENDER_TYPE}, + attributes={"sender_type": CHILD_SENDER_TYPE}, ) logger.debug("Monitor message sent by %r.", self) @@ -609,7 +740,7 @@ def _parse_question(self, question): """Parse a question in the Google Cloud Run or Google Pub/Sub format. 
:param dict|google.cloud.pubsub_v1.subscriber.message.Message question: the question to parse in Google Cloud Run or Google Pub/Sub format - :return (dict, str, bool, str, str, str): the question's event and its attributes (question UUID, whether to forward logs, the Octue SDK version of the parent, whether to save diagnostics, and the SRUID of the service revision that asked the question) + :return (dict, str, str, str, bool, str, str, str, str, int): the question's event and its attributes (question UUID, parent question UUID, originator question UUID, whether to forward logs, the Octue SDK version of the parent, whether to save diagnostics, the SRUID of the parent that asked the question, the SRUID of the service revision that triggered all ancestor questions of this question, and the retry count) """ logger.info("%r received a question.", self) @@ -623,7 +754,7 @@ def _parse_question(self, question): raise_if_event_is_invalid( event=event_for_validation, attributes=attributes, - recipient=self, + recipient=self.id, # Don't assume the presence of specific attributes before validation. parent_sdk_version=attributes.get("sender_sdk_version"), child_sdk_version=importlib.metadata.version("octue"), @@ -631,11 +762,18 @@ def _parse_question(self, question): logger.info("%r parsed question %r successfully.", self, attributes["question_uuid"]) + if attributes["retry_count"] > 0: + logger.warning("This is retry %d for question %r.", attributes["retry_count"], attributes["question_uuid"]) + return ( event, attributes["question_uuid"], + attributes["parent_question_uuid"], + attributes["originator_question_uuid"], attributes["forward_logs"], attributes["sender_sdk_version"], attributes["save_diagnostics"], + attributes["parent"], attributes["originator"], + attributes["retry_count"], ) diff --git a/octue/cloud/pub_sub/subscription.py b/octue/cloud/pub_sub/subscription.py index 53f06f6fd..735238d02 100644 --- a/octue/cloud/pub_sub/subscription.py +++ b/octue/cloud/pub_sub/subscription.py @@ -33,6 +33,7 @@ class Subscription: :param float minimum_retry_backoff: minimum number of seconds after the acknowledgement deadline has passed to exponentially retry delivering a message to the subscription :param float maximum_retry_backoff: maximum number of seconds after the acknowledgement deadline has passed to exponentially retry delivering a message to the subscription :param str|None push_endpoint: if this is a push subscription, this is the URL to which messages should be pushed; leave as `None` if it's not a push subscription + :param bool enable_message_ordering: if `True`, receive messages with the same ordering key in the order they were published :return None: """ @@ -48,6 +49,7 @@ def __init__( minimum_retry_backoff=10, maximum_retry_backoff=600, push_endpoint=None, + enable_message_ordering=True, ): self.name = name self.topic = topic @@ -69,6 +71,7 @@ def __init__( ) self.push_endpoint = push_endpoint + self.enable_message_ordering = enable_message_ordering self._subscriber = SubscriberClient() self._created = False @@ -137,7 +140,13 @@ def update(self): mapping=None, subscription=self._create_proto_message_subscription(), # noqa update_mask=FieldMask( - paths=["ack_deadline_seconds", "message_retention_duration", "expiration_policy", "retry_policy"] + paths=[ + "ack_deadline_seconds", + "message_retention_duration", + "enable_message_ordering", + "expiration_policy", + "retry_policy", + ] ), ) ) @@ -189,6 +198,7 @@ def _create_proto_message_subscription(self): filter=self.filter, # noqa 
ack_deadline_seconds=self.ack_deadline, # noqa message_retention_duration=self.message_retention_duration, # noqa + enable_message_ordering=self.enable_message_ordering, # noqa expiration_policy=self.expiration_policy, # noqa retry_policy=self.retry_policy, # noqa **options, diff --git a/octue/cloud/storage/client.py b/octue/cloud/storage/client.py index f21ad0385..956f4802f 100644 --- a/octue/cloud/storage/client.py +++ b/octue/cloud/storage/client.py @@ -13,6 +13,7 @@ from google.auth.transport import requests as google_requests from google.cloud.storage import Client from google.cloud.storage.constants import _DEFAULT_TIMEOUT +from google.cloud.storage.retry import DEFAULT_RETRY from google_crc32c import Checksum from octue.cloud import storage @@ -28,7 +29,8 @@ class GoogleCloudStorageClient: - """A client for using Google Cloud Storage. + """A client for using Google Cloud Storage. Versioning and metadata versioning (generations and metagenerations) + aren't supported, so the default retry strategy is used throughout. :param str|google.auth.credentials.Credentials|None credentials: :return None: @@ -87,7 +89,7 @@ def upload_file(self, local_path, cloud_path, metadata=None, timeout=_DEFAULT_TI if metadata: blob.metadata = self._encode_metadata(metadata) - blob.upload_from_filename(filename=local_path, timeout=timeout) + blob.upload_from_filename(filename=local_path, timeout=timeout, retry=DEFAULT_RETRY) logger.debug("Uploaded %r to Google Cloud at %r.", local_path, blob.public_url) def upload_from_string(self, string, cloud_path, metadata=None, timeout=_DEFAULT_TIMEOUT): @@ -106,7 +108,7 @@ def upload_from_string(self, string, cloud_path, metadata=None, timeout=_DEFAULT if metadata: blob.metadata = self._encode_metadata(metadata) - blob.upload_from_string(data=string, timeout=timeout) + blob.upload_from_string(data=string, timeout=timeout, retry=DEFAULT_RETRY) logger.debug("Uploaded data to Google Cloud at %r.", blob.public_url) def get_metadata(self, cloud_path, timeout=_DEFAULT_TIMEOUT): @@ -155,7 +157,7 @@ def overwrite_custom_metadata(self, cloud_path, metadata=None): """ blob = self._blob(cloud_path) blob.metadata = self._encode_metadata(metadata or {}) - blob.patch() + blob.patch(retry=DEFAULT_RETRY) def download_to_file(self, local_path, cloud_path, timeout=_DEFAULT_TIMEOUT): """Download a file to a file from a Google Cloud bucket at gs:///. 
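Since the client ignores generations and metagenerations, its mutating calls can safely opt into the library's default retry, as the changes above do. A standalone sketch with hypothetical bucket and object names:

    from google.cloud import storage
    from google.cloud.storage.retry import DEFAULT_RETRY

    client = storage.Client()
    blob = client.bucket("my-bucket").blob("path/to/file.txt")  # Hypothetical names.

    # Without an explicit `retry`, some mutating operations may not be retried on
    # transient errors; passing DEFAULT_RETRY makes them retry.
    blob.upload_from_string("some data", retry=DEFAULT_RETRY)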
@@ -178,38 +180,42 @@ def download_all_files(self, local_path, cloud_path, filter=None, recursive=Fals :param str cloud_path: the path to a cloud storage directory to download :param callable|None filter: an optional callable to filter which files are downloaded from the cloud path; the callable should take a blob as its only positional argument :param bool recursive: if `True`, also download all files in all subdirectories of the cloud directory recursively - :return None: + :return list(str): the list of paths the files were downloaded to """ bucket, _ = self._get_bucket_and_path_in_bucket(cloud_path) - cloud_and_local_paths = [ - { - "cloud_path": storage.path.generate_gs_path(bucket.name, blob.name), - "local_path": os.path.join( + cloud_paths = [] + local_paths = [] + + for blob in self.scandir(cloud_path, filter=filter, recursive=recursive): + cloud_paths.append(storage.path.generate_gs_path(bucket.name, blob.name)) + + local_paths.append( + os.path.join( local_path, storage.path.relpath( storage.path.generate_gs_path(bucket.name, blob.name), cloud_path, ), - ), - } - for blob in self.scandir(cloud_path, filter=filter, recursive=recursive) - ] + ) + ) - if not cloud_and_local_paths: + if not cloud_paths: logger.warning( "Attempted to download files from %r but it appears empty. Please check this is the correct path.", cloud_path, ) - return + return [] - def download_file(cloud_and_local_path): - self.download_to_file(cloud_and_local_path["local_path"], cloud_and_local_path["cloud_path"]) + def download_file(local_and_cloud_path): + self.download_to_file(local_and_cloud_path[0], local_and_cloud_path[1]) with concurrent.futures.ThreadPoolExecutor() as executor: - for path in executor.map(download_file, cloud_and_local_paths): + for path in executor.map(download_file, zip(local_paths, cloud_paths)): logger.debug("Downloaded file to %r.", path) + return local_paths + def download_as_string(self, cloud_path, timeout=_DEFAULT_TIMEOUT): """Download a file to a string from a Google Cloud bucket at gs:///. @@ -233,7 +239,15 @@ def copy(self, original_cloud_path, destination_cloud_path, timeout=_DEFAULT_TIM blob = self._blob(original_cloud_path) original_bucket, _ = self._get_bucket_and_path_in_bucket(original_cloud_path) destination_bucket, path_in_destination_bucket = self._get_bucket_and_path_in_bucket(destination_cloud_path) - original_bucket.copy_blob(blob, destination_bucket, new_name=path_in_destination_bucket, timeout=timeout) + + original_bucket.copy_blob( + blob, + destination_bucket, + new_name=path_in_destination_bucket, + timeout=timeout, + retry=DEFAULT_RETRY, + ) + logger.debug("Copied %r to %r.", original_cloud_path, destination_cloud_path) def delete(self, cloud_path, timeout=_DEFAULT_TIMEOUT): @@ -244,7 +258,7 @@ def delete(self, cloud_path, timeout=_DEFAULT_TIMEOUT): :return None: """ blob = self._blob(cloud_path) - blob.delete(timeout=timeout) + blob.delete(timeout=timeout, retry=DEFAULT_RETRY) logger.debug("Deleted %r from Google Cloud.", blob.public_url) def scandir( diff --git a/octue/configuration.py b/octue/configuration.py index c5f487013..25d061d8b 100644 --- a/octue/configuration.py +++ b/octue/configuration.py @@ -8,6 +8,9 @@ logger = logging.getLogger(__name__) +DEFAULT_SERVICE_CONFIGURATION_PATH = "octue.yaml" + + class ServiceConfiguration: """A class containing the details needed to configure a service. 
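Since `download_all_files` now returns the local paths instead of `None`, callers can use the result directly; a minimal sketch, using the module path from the diff header and a hypothetical bucket path:

from octue.cloud.storage.client import GoogleCloudStorageClient

client = GoogleCloudStorageClient()

# An empty list is returned (with a warning logged) if the cloud directory appears empty.
local_paths = client.download_all_files(
    local_path="data/input",
    cloud_path="gs://example-bucket/datasets/met-mast",  # hypothetical path
    recursive=True,
)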
@@ -16,8 +19,10 @@ class ServiceConfiguration: :param str app_source_path: the path to the directory containing the app's source code :param str twine_path: the path to the twine file defining the schema for input, output, and configuration data for the service :param str|None app_configuration_path: the path to the app configuration file containing configuration data for the service; if this is `None`, the default application configuration is used - :param str|None diagnostics_cloud_path: the path to a cloud directory to store diagnostics (this includes the configuration, input values and manifest, and logs) + :param str|None diagnostics_cloud_path: the path to a cloud directory to store diagnostics (this includes the configuration, input values and manifest, and logs for each question) :param iter(dict)|None service_registries: the names and endpoints of the registries used to resolve service revisions when asking questions; these should be in priority order (highest priority first) + :param str|None event_store_table_id: the full ID of the Google BigQuery table used as the event store e.g. "your-project.your-dataset.your-table" + :param bool delete_local_files: if `True`, delete any files downloaded and temporary directories created during an analysis once it's finished :param str|None directory: if provided, find the app source, twine, and app configuration relative to this directory :return None: """ @@ -31,6 +36,8 @@ def __init__( app_configuration_path=None, diagnostics_cloud_path=None, service_registries=None, + event_store_table_id=None, + delete_local_files=False, directory=None, **kwargs, ): @@ -38,6 +45,8 @@ def __init__( self.namespace = namespace self.diagnostics_cloud_path = diagnostics_cloud_path self.service_registries = service_registries + self.event_store_table_id = event_store_table_id + self.delete_local_files = delete_local_files if directory: directory = os.path.abspath(directory) @@ -67,12 +76,14 @@ def __init__( logger.warning(f"The following keyword arguments were not used by {type(self).__name__}: {kwargs!r}.") @classmethod - def from_file(cls, path): - """Load a service configuration from a file. + def from_file(cls, path=None): + """Load a service configuration from a YAML file. - :param str path: - :return ServiceConfiguration: + :param str|None path: the path to the service configuration YAML file; if not provided, the `OCTUE_SERVICE_CONFIGURATION_PATH` environment variable is used if present, otherwise the local path `octue.yaml` is used + :return ServiceConfiguration: the service configuration loaded from the file """ + path = path or os.environ.get("OCTUE_SERVICE_CONFIGURATION_PATH", DEFAULT_SERVICE_CONFIGURATION_PATH) + with open(path) as f: raw_service_configuration = yaml.load(f, Loader=yaml.SafeLoader) @@ -82,6 +93,13 @@ def from_file(cls, path): # Ignore services other than the first for now. return cls(**raw_service_configuration["services"][0], directory=os.path.dirname(absolute_path)) + def __repr__(self): + """Represent the service configuration as a string. + + :return str: the service configuration as a string + """ + return f"<{type(self).__name__}('{self.namespace}/{self.name}')>" + class AppConfiguration: """A class containing the configuration data needed to start an app as a service. 
The configuration data should @@ -124,12 +142,12 @@ def from_file(cls, path): return cls(**raw_app_configuration) -def load_service_and_app_configuration(service_configuration_path): +def load_service_and_app_configuration(service_configuration_path=None): """Load the service configuration from the given YAML file and the app configuration referenced in it. If no app configuration is referenced, an empty one is returned. - :param str service_configuration_path: path to service configuration file - :return (octue.configuration.ServiceConfiguration, octue.configuration.AppConfiguration): + :param str|None service_configuration_path: the path to the service configuration YAML file; if not provided, the `OCTUE_SERVICE_CONFIGURATION_PATH` environment variable is used if present, otherwise the local path `octue.yaml` is used + :return (octue.configuration.ServiceConfiguration, octue.configuration.AppConfiguration): the service configuration loaded from the YAML file and the app configuration specified by the service configuration (or an empty app configuration if none is specified) """ service_configuration = ServiceConfiguration.from_file(service_configuration_path) app_configuration = AppConfiguration() diff --git a/octue/diagnostics.py b/octue/diagnostics.py index 6ee72e342..06cfbc8cc 100644 --- a/octue/diagnostics.py +++ b/octue/diagnostics.py @@ -102,7 +102,7 @@ def _upload(self): :return None: """ question_diagnostics_path = storage.path.join(self.cloud_path, self.analysis_id) - logger.warning("App failed - saving diagnostics to %r.", question_diagnostics_path) + logger.info("Saving diagnostics to %r.", question_diagnostics_path) for data_type in ("configuration", "input"): values_type = f"{data_type}_values" diff --git a/octue/log_handlers.py b/octue/log_handlers.py index 2e5be01f9..1a50e1e44 100644 --- a/octue/log_handlers.py +++ b/octue/log_handlers.py @@ -228,12 +228,12 @@ def __init__(self, analysis_id, logger, analysis_log_level, extra_log_handlers=N # Create formatters that include the analysis ID in the logging metadata. 
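The configuration changes above make the service configuration path optional everywhere; a sketch of the fallback behaviour (the YAML path is hypothetical):

import os

from octue.configuration import ServiceConfiguration, load_service_and_app_configuration

# With no path argument, `OCTUE_SERVICE_CONFIGURATION_PATH` is used if set;
# otherwise the default local path "octue.yaml" is tried.
os.environ["OCTUE_SERVICE_CONFIGURATION_PATH"] = "configurations/octue.yaml"  # hypothetical path
service_configuration = ServiceConfiguration.from_file()

# The same fallback applies when loading the service and app configurations together.
service_configuration, app_configuration = load_service_and_app_configuration()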
self.coloured_analysis_formatter = create_octue_formatter( get_log_record_attributes_for_environment(), - [f"analysis-{self.analysis_id}"], + [self.analysis_id], ) self.uncoloured_analysis_formatter = create_octue_formatter( get_log_record_attributes_for_environment(), - [f"analysis-{self.analysis_id}"], + [self.analysis_id], use_colour=False, ) diff --git a/octue/metadata/version_compatibilities.json b/octue/metadata/version_compatibilities.json index fcaeed382..5f938de31 100644 --- a/octue/metadata/version_compatibilities.json +++ b/octue/metadata/version_compatibilities.json @@ -36,7 +36,8 @@ "0.52.2": false, "0.53.0": false, "0.54.0": false, - "0.55.0": false + "0.55.0": false, + "0.56.0": false }, "0.40.1": { "0.40.1": true, @@ -75,7 +76,8 @@ "0.52.2": false, "0.53.0": false, "0.54.0": false, - "0.55.0": false + "0.55.0": false, + "0.56.0": false }, "0.40.2": { "0.41.0": true, @@ -114,7 +116,8 @@ "0.52.2": false, "0.53.0": false, "0.54.0": false, - "0.55.0": false + "0.55.0": false, + "0.56.0": false }, "0.41.0": { "0.41.0": true, @@ -153,7 +156,8 @@ "0.52.2": false, "0.53.0": false, "0.54.0": false, - "0.55.0": false + "0.55.0": false, + "0.56.0": false }, "0.41.1": { "0.41.1": true, @@ -192,7 +196,8 @@ "0.52.2": false, "0.53.0": false, "0.54.0": false, - "0.55.0": false + "0.55.0": false, + "0.56.0": false }, "0.42.0": { "0.42.0": true, @@ -231,7 +236,8 @@ "0.52.2": false, "0.53.0": false, "0.54.0": false, - "0.55.0": false + "0.55.0": false, + "0.56.0": false }, "0.42.1": { "0.43.2": true, @@ -270,7 +276,8 @@ "0.52.2": false, "0.53.0": false, "0.54.0": false, - "0.55.0": false + "0.55.0": false, + "0.56.0": false }, "0.43.0": { "0.43.2": true, @@ -309,7 +316,8 @@ "0.52.2": false, "0.53.0": false, "0.54.0": false, - "0.55.0": false + "0.55.0": false, + "0.56.0": false }, "0.43.1": { "0.43.2": true, @@ -348,7 +356,8 @@ "0.52.2": false, "0.53.0": false, "0.54.0": false, - "0.55.0": false + "0.55.0": false, + "0.56.0": false }, "0.43.2": { "0.43.2": true, @@ -387,7 +396,8 @@ "0.52.2": false, "0.53.0": false, "0.54.0": false, - "0.55.0": false + "0.55.0": false, + "0.56.0": false }, "0.43.3": { "0.43.3": true, @@ -426,7 +436,8 @@ "0.52.2": false, "0.53.0": false, "0.54.0": false, - "0.55.0": false + "0.55.0": false, + "0.56.0": false }, "0.43.4": { "0.43.4": true, @@ -465,7 +476,8 @@ "0.52.2": false, "0.53.0": false, "0.54.0": false, - "0.55.0": false + "0.55.0": false, + "0.56.0": false }, "0.43.5": { "0.43.5": true, @@ -504,7 +516,8 @@ "0.52.2": false, "0.53.0": false, "0.54.0": false, - "0.55.0": false + "0.55.0": false, + "0.56.0": false }, "0.43.6": { "0.43.6": true, @@ -543,7 +556,8 @@ "0.52.2": false, "0.53.0": false, "0.54.0": false, - "0.55.0": false + "0.55.0": false, + "0.56.0": false }, "0.43.7": { "0.43.7": true, @@ -582,7 +596,8 @@ "0.52.2": false, "0.53.0": false, "0.54.0": false, - "0.55.0": false + "0.55.0": false, + "0.56.0": false }, "0.44.0": { "0.44.0": true, @@ -621,7 +636,8 @@ "0.52.2": false, "0.53.0": false, "0.54.0": false, - "0.55.0": false + "0.55.0": false, + "0.56.0": false }, "0.45.0": { "0.45.0": true, @@ -660,7 +676,8 @@ "0.52.2": false, "0.53.0": false, "0.54.0": false, - "0.55.0": false + "0.55.0": false, + "0.56.0": false }, "0.46.0": { "0.46.0": true, @@ -699,7 +716,8 @@ "0.52.2": false, "0.53.0": false, "0.54.0": false, - "0.55.0": false + "0.55.0": false, + "0.56.0": false }, "0.46.1": { "0.46.1": true, @@ -738,7 +756,8 @@ "0.52.2": false, "0.53.0": false, "0.54.0": false, - "0.55.0": false + "0.55.0": false, + "0.56.0": false }, "0.46.2": { 
"0.46.2": true, @@ -777,7 +796,8 @@ "0.52.2": false, "0.53.0": false, "0.54.0": false, - "0.55.0": false + "0.55.0": false, + "0.56.0": false }, "0.46.3": { "0.46.3": true, @@ -816,7 +836,8 @@ "0.52.2": false, "0.53.0": false, "0.54.0": false, - "0.55.0": false + "0.55.0": false, + "0.56.0": false }, "0.47.0": { "0.47.0": true, @@ -855,7 +876,8 @@ "0.52.2": false, "0.53.0": false, "0.54.0": false, - "0.55.0": false + "0.55.0": false, + "0.56.0": false }, "0.47.1": { "0.47.1": true, @@ -894,7 +916,8 @@ "0.52.2": false, "0.53.0": false, "0.54.0": false, - "0.55.0": false + "0.55.0": false, + "0.56.0": false }, "0.47.2": { "0.47.2": true, @@ -933,7 +956,8 @@ "0.52.2": false, "0.53.0": false, "0.54.0": false, - "0.55.0": false + "0.55.0": false, + "0.56.0": false }, "0.48.0": { "0.48.0": true, @@ -972,7 +996,8 @@ "0.52.2": false, "0.53.0": false, "0.54.0": false, - "0.55.0": false + "0.55.0": false, + "0.56.0": false }, "0.49.0": { "0.49.1": true, @@ -1011,7 +1036,8 @@ "0.52.2": false, "0.53.0": false, "0.54.0": false, - "0.55.0": false + "0.55.0": false, + "0.56.0": false }, "0.49.1": { "0.49.1": true, @@ -1050,7 +1076,8 @@ "0.52.2": false, "0.53.0": false, "0.54.0": false, - "0.55.0": false + "0.55.0": false, + "0.56.0": false }, "0.49.2": { "0.49.2": true, @@ -1089,7 +1116,8 @@ "0.52.2": false, "0.53.0": false, "0.54.0": false, - "0.55.0": false + "0.55.0": false, + "0.56.0": false }, "0.50.0": { "0.50.0": true, @@ -1128,7 +1156,8 @@ "0.52.2": false, "0.53.0": false, "0.54.0": false, - "0.55.0": false + "0.55.0": false, + "0.56.0": false }, "0.50.1": { "0.51.0": false, @@ -1167,7 +1196,8 @@ "0.52.2": false, "0.53.0": false, "0.54.0": false, - "0.55.0": false + "0.55.0": false, + "0.56.0": false }, "0.51.0": { "0.51.0": true, @@ -1206,7 +1236,8 @@ "0.52.2": true, "0.53.0": false, "0.54.0": false, - "0.55.0": false + "0.55.0": false, + "0.56.0": false }, "0.52.0": { "0.51.0": true, @@ -1245,7 +1276,8 @@ "0.52.2": true, "0.53.0": false, "0.54.0": false, - "0.55.0": false + "0.55.0": false, + "0.56.0": false }, "0.52.1": { "0.51.0": true, @@ -1284,7 +1316,8 @@ "0.52.2": true, "0.53.0": false, "0.54.0": false, - "0.55.0": false + "0.55.0": false, + "0.56.0": false }, "0.52.2": { "0.51.0": true, @@ -1323,7 +1356,8 @@ "0.52.2": true, "0.53.0": false, "0.54.0": false, - "0.55.0": false + "0.55.0": false, + "0.56.0": false }, "0.53.0": { "0.51.0": false, @@ -1362,7 +1396,8 @@ "0.52.2": false, "0.53.0": true, "0.54.0": true, - "0.55.0": true + "0.55.0": true, + "0.56.0": false }, "0.54.0": { "0.51.0": false, @@ -1401,7 +1436,8 @@ "0.52.2": false, "0.53.0": true, "0.54.0": true, - "0.55.0": true + "0.55.0": true, + "0.56.0": false }, "0.55.0": { "0.51.0": false, @@ -1440,6 +1476,47 @@ "0.52.2": false, "0.53.0": true, "0.54.0": true, - "0.55.0": true + "0.55.0": true, + "0.56.0": false + }, + "0.56.0": { + "0.51.0": false, + "0.50.1": false, + "0.50.0": false, + "0.49.2": false, + "0.49.1": false, + "0.49.0": false, + "0.48.0": false, + "0.47.2": false, + "0.47.1": false, + "0.47.0": false, + "0.46.3": false, + "0.46.2": false, + "0.46.1": false, + "0.46.0": false, + "0.45.0": false, + "0.44.0": false, + "0.43.7": false, + "0.43.6": false, + "0.43.5": false, + "0.43.4": false, + "0.43.3": false, + "0.43.2": false, + "0.43.1": false, + "0.43.0": false, + "0.42.1": false, + "0.42.0": false, + "0.41.1": false, + "0.41.0": false, + "0.40.2": false, + "0.40.1": false, + "0.40.0": false, + "0.52.0": false, + "0.52.1": false, + "0.52.2": false, + "0.53.0": false, + "0.54.0": false, + "0.55.0": false, + 
"0.56.0": true } } diff --git a/octue/mixins/metadata.py b/octue/mixins/metadata.py index 05dd99658..721613c4d 100644 --- a/octue/mixins/metadata.py +++ b/octue/mixins/metadata.py @@ -7,6 +7,15 @@ class Metadata: _METADATA_ATTRIBUTES = tuple() + @property + @abstractmethod + def metadata_path(self): + """Get the path to the instance's local metadata file if it has one. + + :return str|None: + """ + pass + @property def metadata_hash_value(self): """Get the hash of the instance's metadata, not including its ID. diff --git a/octue/resources/child.py b/octue/resources/child.py index c0f30bd67..4169c568b 100644 --- a/octue/resources/child.py +++ b/octue/resources/child.py @@ -2,6 +2,7 @@ import copy import logging import os +import uuid from octue.cloud.pub_sub.service import Service from octue.resources import service_backends @@ -18,12 +19,12 @@ class Child: :param str id: the ID of the child :param dict backend: must include the key "name" with a value of the name of the type of backend e.g. "GCPPubSubBackend" and key-value pairs for any other parameters the chosen backend expects - :param str internal_service_name: the name to give to the internal service used to ask questions to the child + :param str internal_sruid: the SRUID to give to the internal service used to ask questions to the child :param iter(dict)|None service_registries: the names and endpoints of the registries used to resolve the child's service revision when asking it questions; these should be in priority order (highest priority first) :return None: """ - def __init__(self, id, backend, internal_service_name="local/local:local", service_registries=None): + def __init__(self, id, backend, internal_sruid="local/local:local", service_registries=None): self.id = id backend = copy.deepcopy(backend) @@ -31,7 +32,7 @@ def __init__(self, id, backend, internal_service_name="local/local:local", servi backend = service_backends.get_backend(backend_type_name)(**backend) self._service = BACKEND_TO_SERVICE_MAPPING[backend_type_name]( - name=internal_service_name, + service_id=internal_sruid, backend=backend, service_registries=service_registries, ) @@ -63,8 +64,12 @@ def ask( record_events=True, save_diagnostics="SAVE_DIAGNOSTICS_ON_CRASH", # This is repeated as a string here to avoid a circular import. 
question_uuid=None, + parent_question_uuid=None, + originator_question_uuid=None, + originator=None, push_endpoint=None, asynchronous=False, + retry_count=0, timeout=86400, maximum_heartbeat_interval=300, ): @@ -83,8 +88,12 @@ def ask( :param bool record_events: if `True`, record messages received from the child in the `received_events` property :param str save_diagnostics: must be one of {"SAVE_DIAGNOSTICS_OFF", "SAVE_DIAGNOSTICS_ON_CRASH", "SAVE_DIAGNOSTICS_ON"}; if turned on, allow the input values and manifest (and its datasets) to be saved by the child either all the time or just if it fails while processing them :param str|None question_uuid: the UUID to use for the question if a specific one is needed; a UUID is generated if not + :param str|None parent_question_uuid: the UUID of the question that triggered this question + :param str|None originator_question_uuid: the UUID of the question that triggered all ancestor questions of this question; if `None`, this question is assumed to be the originator question + :param str|None originator: the SRUID of the service revision that triggered all ancestor questions of this question; if `None`, this service revision is assumed to be the originator :param str|None push_endpoint: if answers to the question should be pushed to an endpoint, provide its URL here (the returned subscription will be a push subscription); if not, leave this as `None` :param bool asynchronous: if `True`, don't wait for an answer or create an answer subscription (the result and other events can be retrieved from the event store later) + :param int retry_count: the retry count of the question (this is zero if it's the first attempt at the question) :param float timeout: time in seconds to wait for an answer before raising a timeout error :param float|int maximum_heartbeat_interval: the maximum amount of time (in seconds) allowed between child heartbeats before an error is raised :raise TimeoutError: if the timeout is exceeded while waiting for an answer @@ -99,8 +108,12 @@ def ask( allow_local_files=allow_local_files, save_diagnostics=save_diagnostics, question_uuid=question_uuid, + parent_question_uuid=parent_question_uuid, + originator_question_uuid=originator_question_uuid, + originator=originator, push_endpoint=push_endpoint, asynchronous=asynchronous, + retry_count=retry_count, timeout=timeout, ) @@ -117,7 +130,15 @@ def ask( return answer, question_uuid - def ask_multiple(self, *questions, raise_errors=True, max_retries=0, prevent_retries_when=None, max_workers=None): + def ask_multiple( + self, + *questions, + raise_errors=True, + max_retries=0, + prevent_retries_when=None, + max_workers=None, + log_errors=True, + ): """Ask the child multiple questions in parallel and wait for the answers. Each question should be provided as a dictionary of `Child.ask` keyword arguments. 
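A hedged sketch of asking a question with the new lineage arguments; the child SRUID, project name, and UUIDs are hypothetical, and `GCPPubSubBackend` taking a `project_name` key is an assumption:

from octue.resources import Child

child = Child(
    id="octue/example-service:2.1.0",  # hypothetical child SRUID
    backend={"name": "GCPPubSubBackend", "project_name": "my-project"},  # assumed backend parameters
)

# All lineage arguments are optional - if they're omitted, this question is
# treated as the originator question.
answer, question_uuid = child.ask(
    input_values={"height": 7},
    parent_question_uuid="b2a3cdd5-4b34-4db5-8e2c-3b93d9b93a2e",  # hypothetical UUID
    originator_question_uuid="9d1f2c8a-0e64-45a2-bd41-8a2f0db23c11",  # hypothetical UUID
    originator="octue/parent-service:1.0.0",  # hypothetical SRUID
    retry_count=0,
)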
If `raise_errors` is `True`, an error is raised and no answers are returned if any of the individual questions raise an error; if it's `False`, answers are returned for all @@ -127,7 +148,8 @@ def ask_multiple(self, *questions, raise_errors=True, max_retries=0, prevent_ret :param bool raise_errors: if `True`, an error is raised and no answers are returned if any of the individual questions raise an error; if `False`, answers are returned for all successful questions while errors are returned unraised for any failed ones :param int max_retries: retry any questions that failed up to this number of times (note: this will have no effect unless `raise_errors=False`) :param list(type)|None prevent_retries_when: prevent retrying any questions that fail with an exception type in this list (note: this will have no effect unless `raise_errors=False`) - :param int|None max_workers: the maximum number of questions that can be asked at once; defaults to `min(32, os.cpu_count() + 4, len(questions))` (see `concurrent.futures.ThreadPoolExecutor`) + :param int|None max_workers: the maximum number of questions that can be asked at once; defaults to the lowest of {32, no. of CPUs + 4, and no. of questions} (see `concurrent.futures.ThreadPoolExecutor`) + :param bool log_errors: if `True` and `raise_errors=False`, log any errors remaining once retries are exhausted :raise ValueError: if the maximum number of parallel questions is set too high :raise Exception: if any question raises an error if `raise_errors` is `True` :return list(dict|Exception, str): the answers or caught errors of the questions, and the question UUIDs (in the same order as asked) @@ -136,16 +158,23 @@ def ask_multiple(self, *questions, raise_errors=True, max_retries=0, prevent_ret # Answers will come out of order, so use a dictionary to store them against their questions' original index. answers = {} - max_workers = max_workers or min(32, os.cpu_count() + 4, len(questions)) - logger.info("Asking %d questions.", len(questions)) + n_questions = len(questions) + max_workers = max_workers or min(32, (os.cpu_count() or 1) + 4, n_questions) + logger.info("Asking %d questions with maximum %d threads.", n_questions, max_workers) with concurrent.futures.ThreadPoolExecutor(max_workers=max_workers) as executor: - future_to_question_index_mapping = { - executor.submit(self.ask, **question): i for i, question in enumerate(questions) - } + future_to_question_index_mapping = {} + + for i, question in enumerate(questions): + # Add a question UUID if not set so retries have the same one. + if "question_uuid" not in question: + question["question_uuid"] = str(uuid.uuid4()) + + future = executor.submit(self.ask, **question) + future_to_question_index_mapping[future] = i for i, future in enumerate(concurrent.futures.as_completed(future_to_question_index_mapping)): - logger.info("%d of %d answers received.", i + 1, len(questions)) + logger.info("%d of %d answers received.", i + 1, n_questions) question_index = future_to_question_index_mapping[future] try: @@ -154,24 +183,66 @@ def ask_multiple(self, *questions, raise_errors=True, max_retries=0, prevent_ret if raise_errors: raise e - answers[question_index] = e - logger.exception("Question %d failed.", question_index) + question_uuid = questions[question_index]["question_uuid"] + answers[question_index] = (e, question_uuid) - for retry in range(max_retries): - failed_questions = {} + logger.error( + "Question %s failed. 
Run 'octue get-diagnostics gs:///%s " + "--download-datasets' to get the crash diagnostics.", + question_uuid, + question_uuid, + ) - for question_index, answer in answers.items(): - if isinstance(answer, Exception) and type(answer) not in prevent_retries_when: - failed_questions[question_index] = questions[question_index] + for retry in range(max_retries): + failed_questions = self._get_failed_questions( + questions, + answers, + prevent_retries_when, + increment_retry_count=True, + ) if not failed_questions: break - logger.info("%d questions failed - retrying.", len(failed_questions)) - retried_answers = self.ask_multiple(*failed_questions.values(), raise_errors=False) + logger.info("%d questions failed - retrying %d of %d times.", len(failed_questions), retry + 1, max_retries) + retried_answers = self.ask_multiple(*failed_questions.values(), raise_errors=False, log_errors=False) for question_index, answer in zip(failed_questions.keys(), retried_answers): answers[question_index] = answer + if log_errors: + # Check for failed questions after retries completed. + failed_questions = self._get_failed_questions(questions, answers, prevent_retries_when) + + for question_index, question in failed_questions.items(): + logger.error( + "Question %s failed after %d retries (see below for error).", + question["question_uuid"], + max_retries, + exc_info=answers[question_index][0], + ) + # Convert dictionary to list in asking order. return [answer[1] for answer in sorted(answers.items(), key=lambda item: item[0])] + + def _get_failed_questions(self, questions, answers, prevent_retries_when, increment_retry_count=False): + """Get the questions that failed. + + :param list(dict) questions: the list of questions that were asked + :param dict answers: a mapping of question index (i.e. position in the original list of questions) to question answer + :param list(type)|None prevent_retries_when: prevent retrying any questions that fail with an exception type in this list (note: this will have no effect unless `raise_errors=False`) + :param bool increment_retry_count: if `True`, increment the question retry count by 1 + :return dict: a mapping of failed question index (i.e. position in the original list of questions) to failed question + """ + failed_questions = {} + + for question_index, answer in answers.items(): + if isinstance(answer[0], Exception) and type(answer[0]) not in prevent_retries_when: + question = questions[question_index] + + if increment_retry_count: + question["retry_count"] = question.get("retry_count", 0) + 1 + + failed_questions[question_index] = question + + return failed_questions diff --git a/octue/resources/datafile.py b/octue/resources/datafile.py index 8d39e5d46..b7d0f1bd0 100644 --- a/octue/resources/datafile.py +++ b/octue/resources/datafile.py @@ -173,6 +173,17 @@ def cloud_hash_value(self): """ return self._cloud_metadata.get("crc32c") + @property + def metadata_path(self): + """Get the path to the datafile's local metadata file (if the datafile exists locally). + + :return str|None: + """ + if not self.exists_locally: + return None + + return os.path.join(os.path.dirname(self._local_path), METADATA_FILENAME) + @property def timestamp(self): """Get the timestamp of the datafile. @@ -307,17 +318,6 @@ def open(self): """ return functools.partial(_DatafileContextManager, self) - @property - def _local_metadata_path(self): - """Get the path to the datafile's local metadata file (if the datafile exists locally). 
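A hedged sketch of the retry behaviour above, reusing the hypothetical `child` from the previous sketch: question UUIDs are assigned up front so retries reuse them, and with `raise_errors=False` failures come back unraised as `(exception, question UUID)` pairs:

answers = child.ask_multiple(
    {"input_values": {"height": 7}},
    {"input_values": {"height": 8}},
    raise_errors=False,
    max_retries=2,
    prevent_retries_when=[ValueError],  # hypothetical non-retryable exception type
    log_errors=True,
)

# Each element is either `(answer, question_uuid)` or `(exception, question_uuid)`.
for answer, question_uuid in answers:
    if isinstance(answer, Exception):
        print(f"Question {question_uuid} failed with {answer!r}.")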
- - :return str|None: - """ - if not self.exists_locally: - return None - - return os.path.join(os.path.dirname(self._local_path), METADATA_FILENAME) - def __enter__(self): self._open_context_manager = self.open(**self._open_attributes) return self, self._open_context_manager.__enter__() @@ -462,7 +462,7 @@ def update_local_metadata(self): :return None: """ - with UpdateLocalMetadata(self._local_metadata_path) as existing_metadata_records: + with UpdateLocalMetadata(self) as existing_metadata_records: if not existing_metadata_records.get("datafiles"): existing_metadata_records["datafiles"] = {} @@ -596,7 +596,7 @@ def _use_local_metadata(self): :return None: """ - existing_metadata_records = load_local_metadata_file(self._local_metadata_path) + existing_metadata_records = load_local_metadata_file(self) datafile_metadata = existing_metadata_records.get("datafiles", {}).get(self.name, {}) if not datafile_metadata: diff --git a/octue/resources/dataset.py b/octue/resources/dataset.py index e66234f88..9a3b5b659 100644 --- a/octue/resources/dataset.py +++ b/octue/resources/dataset.py @@ -4,7 +4,6 @@ import json import logging import os -import tempfile from collections.abc import Iterable import coolname @@ -17,6 +16,7 @@ from octue.resources.datafile import Datafile from octue.resources.filter_containers import FilterSet from octue.utils.encoders import OctueJSONEncoder +from octue.utils.files import RegisteredTemporaryDirectory from octue.utils.metadata import METADATA_FILENAME, UpdateLocalMetadata, load_local_metadata_file @@ -62,7 +62,7 @@ def __init__( self, path=None, files=None, - recursive=False, + recursive=True, ignore_stored_metadata=False, include_octue_metadata_files=False, id=None, @@ -96,6 +96,9 @@ def __init__( self._instantiate_from_local_directory(path=self.path) + if len(self.files) == 0: + logger.warning("%r is empty at instantiation time (path %r).", self, self.path) + @property def name(self): """Get the name of the dataset @@ -136,7 +139,7 @@ def all_files_are_in_cloud(self): return all(file.exists_in_cloud for file in self.files) @property - def _metadata_path(self): + def metadata_path(self): """Get the path to the dataset's metadata file. :return str: @@ -242,7 +245,7 @@ def update_cloud_metadata(self): """ GoogleCloudStorageClient().upload_from_string( string=json.dumps({"dataset": self.to_primitive(include_files=False)}, cls=OctueJSONEncoder), - cloud_path=self._metadata_path, + cloud_path=self.metadata_path, ) def update_local_metadata(self): @@ -250,7 +253,7 @@ def update_local_metadata(self): :return None: """ - with UpdateLocalMetadata(self._metadata_path) as existing_metadata_records: + with UpdateLocalMetadata(self) as existing_metadata_records: existing_metadata_records["dataset"] = self.to_primitive(include_files=False) os.makedirs(self.path, exist_ok=True) @@ -343,7 +346,7 @@ def download(self, local_directory=None): f"You can only download files from a cloud dataset. This dataset's path is {self.path!r}." 
) - local_directory = os.path.abspath(local_directory or tempfile.TemporaryDirectory().name) + local_directory = os.path.abspath(local_directory or RegisteredTemporaryDirectory().name) datafiles_and_paths = [] for file in self.files: @@ -490,10 +493,10 @@ def _get_cloud_metadata(self): storage_client = GoogleCloudStorageClient() - if not storage_client.exists(cloud_path=self._metadata_path): + if not storage_client.exists(cloud_path=self.metadata_path): return - self._cloud_metadata = json.loads(storage_client.download_as_string(cloud_path=self._metadata_path)).get( + self._cloud_metadata = json.loads(storage_client.download_as_string(cloud_path=self.metadata_path)).get( "dataset", {} ) @@ -516,7 +519,7 @@ def _use_local_metadata(self): :return None: """ - local_metadata = load_local_metadata_file(self._metadata_path) + local_metadata = load_local_metadata_file(self) dataset_metadata = local_metadata.get("dataset", {}) if not dataset_metadata: diff --git a/octue/runner.py b/octue/runner.py index 5228429f5..df571cc87 100644 --- a/octue/runner.py +++ b/octue/runner.py @@ -1,8 +1,10 @@ import copy +import functools import logging import logging.handlers import os import re +import uuid import google.api_core.exceptions from google import auth @@ -17,7 +19,7 @@ from octue.resources import Child from octue.resources.analysis import CLASS_MAP, Analysis from octue.resources.datafile import downloaded_files -from octue.utils import gen_uuid +from octue.utils.files import registered_temporary_directories from twined import Twine @@ -42,10 +44,10 @@ class Runner: :param str|dict|None configuration_manifest: The strand data. Can be expressed as a string path of a *.json file (relative or absolute), as an open file-like object (containing json data), as a string of json data or as an already-parsed dict. :param str|list(dict)|None children: The children strand data. Can be expressed as a string path of a *.json file (relative or absolute), as an open file-like object (containing json data), as a string of json data or as an already-parsed dict. 
:param str|None output_location: the path to a cloud directory to save output datasets at - :param str|None diagnostics_cloud_path: the path to a cloud directory to store diagnostics in the event that the service fails while processing a question (this includes the configuration, input values and manifest, and logs) + :param str|None diagnostics_cloud_path: the path to a cloud directory to store diagnostics (this includes the configuration, input values and manifest, and logs for each question) :param str|None project_name: name of Google Cloud project to get credentials from :param str|None service_id: the ID of the service being run - :param bool delete_local_files: if `True`, delete any files downloaded during the call to `Runner.run` once the analysis has finished + :param bool delete_local_files: if `True`, delete any files downloaded and registered temporary directories created during an analysis once it's finished :return None: """ @@ -61,7 +63,7 @@ def __init__( project_name=None, service_id=None, service_registries=None, - delete_local_files=True, + delete_local_files=False, ): self.app_source = app_src self.children = children @@ -105,27 +107,40 @@ def __init__( self._project_name = project_name @classmethod - def from_configuration(cls, service_configuration, app_configuration, project_name=None, service_id=None): + def from_configuration( + cls, + service_configuration, + app_configuration, + project_name=None, + service_id=None, + **overrides, + ): """Instantiate a runner from a service and app configuration. :param octue.configuration.ServiceConfiguration service_configuration: :param octue.configuration.AppConfiguration app_configuration: :param str|None project_name: name of Google Cloud project to get credentials from :param str|None service_id: the ID of the service being run + :param overrides: optional keyword arguments to override the `Runner` instantiation parameters extracted from the service and app configuration :return octue.runner.Runner: a runner configured with the given service and app configuration """ - return cls( - app_src=service_configuration.app_source_path, - twine=service_configuration.twine_path, - configuration_values=app_configuration.configuration_values, - configuration_manifest=app_configuration.configuration_manifest, - children=app_configuration.children, - output_location=app_configuration.output_location, - diagnostics_cloud_path=service_configuration.diagnostics_cloud_path, - project_name=project_name, - service_id=service_id, - service_registries=service_configuration.service_registries, - ) + inputs = { + "app_src": service_configuration.app_source_path, + "twine": service_configuration.twine_path, + "configuration_values": app_configuration.configuration_values, + "configuration_manifest": app_configuration.configuration_manifest, + "children": app_configuration.children, + "output_location": app_configuration.output_location, + "diagnostics_cloud_path": service_configuration.diagnostics_cloud_path, + "project_name": project_name, + "service_id": service_id, + "service_registries": service_configuration.service_registries, + "delete_local_files": service_configuration.delete_local_files, + } + + inputs |= overrides + + return cls(**inputs) def __repr__(self): """Represent the runner as a string. @@ -144,6 +159,8 @@ def run( analysis_log_handler=None, handle_monitor_message=None, save_diagnostics=SAVE_DIAGNOSTICS_ON_CRASH, + originator_question_uuid=None, + originator=None, ): """Run an analysis. 
@@ -155,6 +172,8 @@ def run( :param logging.Handler|None analysis_log_handler: the logging.Handler instance which will be used to handle logs for this analysis run. Handlers can be created as per the logging cookbook https://docs.python.org/3/howto/logging-cookbook.html but should use the format defined above in LOG_FORMAT. :param callable|None handle_monitor_message: a function that sends monitor messages to the parent that requested the analysis :param str save_diagnostics: must be one of {"SAVE_DIAGNOSTICS_OFF", "SAVE_DIAGNOSTICS_ON_CRASH", "SAVE_DIAGNOSTICS_ON"}; if turned on, allow the input values and manifest (and its datasets) to be saved either all the time or just if the analysis fails + :param str|None originator_question_uuid: the UUID of the question that triggered all ancestor questions of this analysis; if `None`, this question is assumed to be the originator question + :param str|None originator: the SRUID of the service revision that triggered all ancestor questions of this question; if `None`, this service revision is assumed to be the originator :return octue.resources.analysis.Analysis: """ if save_diagnostics not in SAVE_DIAGNOSTICS_MODES: @@ -162,6 +181,15 @@ def run( f"`save_diagnostics` must be one of {SAVE_DIAGNOSTICS_MODES!r}; received {save_diagnostics!r}." ) + # Set the analysis ID if one isn't given. + analysis_id = str(analysis_id) if analysis_id else str(uuid.uuid4()) + + # This analysis is the parent question. + parent_question_uuid = analysis_id + + # If the originator question UUID isn't provided, assume that this analysis is the originator question. + originator_question_uuid = originator_question_uuid or analysis_id + # Get inputs before any transformations have been applied. self.diagnostics.add_data( analysis_id=analysis_id, @@ -198,7 +226,12 @@ def run( ) if inputs["children"] is not None: - inputs["children"] = self._instantiate_children(inputs["children"]) + inputs["children"] = self._instantiate_children( + serialised_children=inputs["children"], + parent_question_uuid=parent_question_uuid, + originator_question_uuid=originator_question_uuid, + originator=originator, + ) outputs_and_monitors = self.twine.prepare("monitor_message", "output_values", "output_manifest", cls=CLASS_MAP) @@ -207,8 +240,6 @@ def run( else: extra_log_handlers = [] - analysis_id = str(analysis_id) if analysis_id else gen_uuid() - # Temporarily replace the root logger's handlers with a `StreamHandler` and the analysis log handler that # include the analysis ID in the logging metadata. with AnalysisLogFormatterSwitcher( @@ -235,6 +266,8 @@ def run( raise ModuleNotFoundError(f"{e.msg} in {os.path.abspath(self.app_source)!r}.") except Exception as analysis_error: + logger.warning("App failed.") + if save_diagnostics in {SAVE_DIAGNOSTICS_ON_CRASH, SAVE_DIAGNOSTICS_ON}: self.diagnostics.upload() @@ -245,26 +278,7 @@ def run( thread.cancel() logger.debug("Periodic monitor message thread %d stopped.", i) - if not analysis.finalised: - analysis.finalise() - - if save_diagnostics == SAVE_DIAGNOSTICS_ON: - self.diagnostics.upload() - - if self.delete_local_files and downloaded_files: - logger.warning( - "Deleting files downloaded during analysis. This is not thread-safe - set " - "`delete_local_files=False` at instantiation of `Runner` to switch this off." 
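A sketch of the new `from_configuration` overrides, assuming a local `octue.yaml`; overrides are merged in last (`inputs |= overrides`), so they take precedence over values from the service and app configurations:

from octue.configuration import load_service_and_app_configuration
from octue.runner import Runner

service_configuration, app_configuration = load_service_and_app_configuration()

runner = Runner.from_configuration(
    service_configuration,
    app_configuration,
    project_name="my-project",  # hypothetical project
    delete_local_files=True,  # overrides `service_configuration.delete_local_files`
)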
- ) - - for path in downloaded_files: - logger.debug("Deleting downloaded file at %r.", path) - - try: - os.remove(path) - except FileNotFoundError: - logger.debug("Couldn't delete %r - it was already deleted.", path) - + self._finalise_and_clean_up(analysis, save_diagnostics) return analysis def _populate_environment_with_google_cloud_secrets(self): @@ -331,12 +345,20 @@ def _validate_dataset_file_tags(self, manifest_kind, manifest): raise twined.exceptions.invalid_contents_map[manifest_kind](message) - def _instantiate_children(self, serialised_children): + def _instantiate_children(self, serialised_children, parent_question_uuid, originator_question_uuid, originator): """Instantiate children from their serialised form (e.g. as given in the app configuration) so they are ready - to be asked questions. For diagnostics, each child's `ask` method is wrapped so the runner can record the - questions asked by the app, the responses received to each question, and the order the questions are asked in. + to be asked questions. Two sets of modifications are made to each child's `ask` method: + + 1. The parent question UUID is set to the current analysis ID, and the originator question UUID and originator + are set. + + 2. For diagnostics, the `ask` method is wrapped so the runner can record the questions asked by the app, the + responses received to each question, and the order the questions are asked in. :param list(dict) serialised_children: serialised children from e.g. the app configuration file + :param str|None parent_question_uuid: the UUID of the question that triggered this analysis + :param str originator_question_uuid: the UUID of the question that triggered all ancestor questions of this analysis + :param str originator: the SRUID of the service revision that triggered the tree of questions this analysis is related to :return dict: a mapping of child keys to `octue.resources.child.Child` instances """ children = {} @@ -345,11 +367,19 @@ def _instantiate_children(self, serialised_children): child = Child( id=uninstantiated_child["id"], backend=uninstantiated_child["backend"], - internal_service_name=self.service_id, + internal_sruid=self.service_id, service_registries=self.service_registries, ) child.ask = self._add_child_question_and_response_recording(child, uninstantiated_child["key"]) + + child.ask = functools.partial( + child.ask, + parent_question_uuid=parent_question_uuid, + originator_question_uuid=originator_question_uuid, + originator=originator, + ) + children[uninstantiated_child["key"]] = child return children @@ -407,3 +437,50 @@ def _load_and_run_app(self, analysis): # App as a function that takes "analysis" as an argument. self.app_source(analysis) + + def _finalise_and_clean_up(self, analysis, save_diagnostics): + """Do the following: + + 1. Finalise the analysis + 2. If diagnostics are switched on, upload the diagnostics + 3. 
If `delete_local_files=True`, delete any datafiles downloaded and registered temporary directories created during the analysis
+
+        :param octue.resources.analysis.Analysis analysis: the analysis object containing the configuration and inputs to run the app on
+        :param str save_diagnostics: must be one of {"SAVE_DIAGNOSTICS_OFF", "SAVE_DIAGNOSTICS_ON_CRASH", "SAVE_DIAGNOSTICS_ON"}; if turned on, allow the input values and manifest (and its datasets) to be saved either all the time or just if the analysis fails
+        :return None:
+        """
+        if not analysis.finalised:
+            analysis.finalise()
+
+        if save_diagnostics == SAVE_DIAGNOSTICS_ON:
+            self.diagnostics.upload()
+
+        if self.delete_local_files:
+            # Delete temporary directories first as this will delete entire downloaded datasets.
+            if registered_temporary_directories:
+                logger.warning(
+                    "Deleting registered temporary directories created during analysis. This is not thread-safe - set "
+                    "`delete_local_files=False` at instantiation of `Runner` to switch this off."
+                )
+
+                for dir in registered_temporary_directories:
+                    logger.debug("Deleting temporary directory at %r.", dir.name)
+                    dir.cleanup()
+
+            # Then delete any datafiles that were downloaded separately from a dataset.
+            if downloaded_files:
+                logger.warning(
+                    "Deleting datafiles downloaded during analysis. This is not thread-safe - set "
+                    "`delete_local_files=False` at instantiation of `Runner` to switch this off."
+                )
+
+                for path in downloaded_files:
+                    if not os.path.exists(path):
+                        continue
+
+                    logger.debug("Deleting downloaded file at %r.", path)
+
+                    try:
+                        os.remove(path)
+                    except FileNotFoundError:
+                        logger.debug("Couldn't delete %r - it was already deleted.", path)
diff --git a/octue/templates/template-using-manifests/app.py b/octue/templates/template-using-manifests/app.py
index a07d18b13..33dace06e 100644
--- a/octue/templates/template-using-manifests/app.py
+++ b/octue/templates/template-using-manifests/app.py
@@ -1,10 +1,10 @@
 import logging
 import os
-import tempfile

 from cleaner import clean, read_csv_files, read_dat_file
 from octue.resources import Datafile, Dataset
+from octue.utils.files import RegisteredTemporaryDirectory

 from tests import TEST_BUCKET_NAME

@@ -76,33 +76,34 @@ def run(analysis):
     # Create a temporary directory for the output dataset. This avoids any race conditions arising (if other instances
     # of this application are running at the same time) and avoids any data loss due to overwriting. The temporary
-    # directory is deleted once the "with" block is exited.
+    # directory is registered so it can be cleaned up once the analysis has finished.
-    with tempfile.TemporaryDirectory() as temporary_directory:
-        timeseries_datafile = Datafile(
-            path=os.path.join(temporary_directory, "cleaned.csv"),
-            labels=["timeseries"],
-        )
-
-        # Write the file (now we know where to write it)
-        with timeseries_datafile.open("w") as fp:
-            data.to_csv(path_or_buf=fp)
-
-        # You can replace empty output datasets with datasets instantiated from a local or cloud directory.
-        analysis.output_manifest.datasets["cleaned_met_mast_data"] = Dataset(
-            path=temporary_directory,
-            name="cleaned_met_mast_data",
-        )
-
-        # We'll add some labels, which will help to improve searchability and allow other apps, reports, users and
-        # analyses to automatically find figures and use them.
-        #
-        # Labels are case insensitive, and accept a-z, 0-9, and hyphens which can be used literally in search and are
-        # also used to separate words in natural language search.
-        analysis.output_manifest.get_dataset("cleaned_met_mast_data").labels = ["met", "mast", "cleaned"]
-
-        # Finalise the analysis.
This validates the output data and output manifest against the twine and optionally - # uploads any datasets in the output manifest to the service's cloud bucket. Signed URLs are provided so that - # the parent that asked the service for the analysis can access the data (until the signed URLs expire). - analysis.finalise(upload_output_datasets_to=f"gs://{TEST_BUCKET_NAME}/output/test_using_manifests_analysis") + temporary_directory = RegisteredTemporaryDirectory().name + + timeseries_datafile = Datafile( + path=os.path.join(temporary_directory, "cleaned.csv"), + labels=["timeseries"], + ) + + # Write the file (now we know where to write it) + with timeseries_datafile.open("w") as fp: + data.to_csv(path_or_buf=fp) + + # You can replace empty output datasets with datasets instantiated from a local or cloud directory. + analysis.output_manifest.datasets["cleaned_met_mast_data"] = Dataset( + path=temporary_directory, + name="cleaned_met_mast_data", + ) + + # We'll add some labels, which will help to improve searchability and allow other apps, reports, users and + # analyses to automatically find figures and use them. + # + # Labels are case insensitive, and accept a-z, 0-9, and hyphens which can be used literally in search and are + # also used to separate words in natural language search. + analysis.output_manifest.get_dataset("cleaned_met_mast_data").labels = ["met", "mast", "cleaned"] + + # Finalise the analysis. This validates the output data and output manifest against the twine and optionally + # uploads any datasets in the output manifest to the service's cloud bucket. Signed URLs are provided so that + # the parent that asked the service for the analysis can access the data (until the signed URLs expire). + analysis.finalise(upload_output_datasets_to=f"gs://{TEST_BUCKET_NAME}/output/test_using_manifests_analysis") # We're done! There's only one datafile in the output dataset, but you could create thousands more and add them # all :) diff --git a/octue/utils/exceptions.py b/octue/utils/exceptions.py index 4c059a8d7..e1de2f878 100644 --- a/octue/utils/exceptions.py +++ b/octue/utils/exceptions.py @@ -25,12 +25,20 @@ def create_exceptions_mapping(*sources): return exceptions_mapping -def convert_exception_to_primitives(): - """Convert an exception into a dictionary of its type, message, and traceback as JSON-serialisable primitives. +def convert_exception_to_primitives(exception=None): + """Convert an exception into a dictionary of its type, message, and traceback as JSON-serialisable primitives. The + exception is acquired using `sys.exc_info` if one is not supplied. 
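A sketch of the new optional argument: the function re-raises the supplied exception internally (as the body below shows) so `sys.exc_info` can capture it even outside the original `except` block:

from octue.utils.exceptions import convert_exception_to_primitives

try:
    {}["missing"]
except KeyError as error:
    caught_error = error

# Works outside the `except` block because the exception is passed in explicitly.
primitives = convert_exception_to_primitives(exception=caught_error)
print(primitives["type"])  # KeyError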
+ :param Exception|None exception: the exception to convert; if `None`, the exception is acquired using `sys.exc_info` :return dict: a dictionary with "type", "message" and "traceback" keys and JSON-serialisable values """ - exception_info = sys.exc_info() + if exception: + try: + raise exception + except Exception: + exception_info = sys.exc_info() + else: + exception_info = sys.exc_info() return { "type": exception_info[0].__name__, diff --git a/octue/utils/files.py b/octue/utils/files.py new file mode 100644 index 000000000..f5f81000a --- /dev/null +++ b/octue/utils/files.py @@ -0,0 +1,12 @@ +import tempfile + + +registered_temporary_directories = [] + + +class RegisteredTemporaryDirectory(tempfile.TemporaryDirectory): + """A temporary directory that's registered at instantiation so it can be referenced later.""" + + def __init__(self, suffix=None, prefix=None, dir=None, ignore_cleanup_errors=False): + super().__init__(suffix=suffix, prefix=prefix, dir=dir, ignore_cleanup_errors=ignore_cleanup_errors) + registered_temporary_directories.append(self) diff --git a/octue/utils/metadata.py b/octue/utils/metadata.py index 63622721f..0222d6645 100644 --- a/octue/utils/metadata.py +++ b/octue/utils/metadata.py @@ -15,15 +15,16 @@ class UpdateLocalMetadata: - """A context manager that provides the contents of the given local metadata file and updates it with any changes - made within its context. The local metadata is retrieved either from the disk or from the cache as appropriate. + """A context manager that provides the contents of the given dataset's or datafile's local metadata file and updates + it with any changes made within its context. The local metadata is retrieved either from the disk or from the cache + as appropriate. - :param str path: the path to the local metadata. The file must be in JSON format. + :param octue.resources.datafile.Datafile|octue.resources.dataset.Dataset datafile_or_dataset: the datafile or dataset to update the local metadata for :return None: """ - def __init__(self, path=METADATA_FILENAME): - self.path = path + def __init__(self, datafile_or_dataset): + self.datafile_or_dataset = datafile_or_dataset self._local_metadata = None def __enter__(self): @@ -31,7 +32,7 @@ def __enter__(self): :return any: the contents of the local metadata file (converted from the JSON in the local metadata file) """ - self._local_metadata = load_local_metadata_file(self.path) + self._local_metadata = load_local_metadata_file(self.datafile_or_dataset) return self._local_metadata def __exit__(self, exc_type, exc_val, exc_tb): @@ -39,101 +40,78 @@ def __exit__(self, exc_type, exc_val, exc_tb): :return None: """ - overwrite_local_metadata_file(self._local_metadata, self.path) + overwrite_local_metadata_file(self._local_metadata, self.datafile_or_dataset) -def load_local_metadata_file(path=METADATA_FILENAME): - """Load metadata from a local metadata records file, returning an empty dictionary if the file does not exist or is - incorrectly formatted. If the file has already been cached, its contents are retrieved from the cache. +def load_local_metadata_file(datafile_or_dataset): + """Load metadata from the local metadata records file for a datafile or dataset, returning an empty dictionary if + the file does not exist or is incorrectly formatted. If the file has already been cached, its contents are retrieved + from the cache. 
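Since `octue/utils/files.py` is new and fully shown above, its behaviour can be demonstrated directly: each instance appends itself to the module-level registry at instantiation so that, for example, the runner can clean all of them up after an analysis:

from octue.utils.files import RegisteredTemporaryDirectory, registered_temporary_directories

temporary_directory = RegisteredTemporaryDirectory()
assert temporary_directory in registered_temporary_directories

# Cleanup works as for `tempfile.TemporaryDirectory`, which it subclasses.
temporary_directory.cleanup()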
- :param str path: the path to the local metadata file + :param octue.resources.datafile.Datafile|octue.resources.dataset.Dataset datafile_or_dataset: the datafile or dataset to load the local metadata file for :return dict: the contents of the local metadata file """ - absolute_path = _get_absolute_path(path) - cached_metadata = _get_metadata_from_cache(absolute_path) + cached_metadata = _get_metadata_from_cache(datafile_or_dataset) if cached_metadata: return cached_metadata - if not os.path.exists(path): + if not os.path.exists(datafile_or_dataset.metadata_path): local_metadata = {} else: - with open(path) as f: + with open(datafile_or_dataset.metadata_path) as f: try: local_metadata = json.load(f, cls=OctueJSONDecoder) except json.decoder.JSONDecodeError: logger.warning( - f"The metadata file at {path!r} is incorrectly formatted so no metadata can be read from it. " - "Please fix or delete it." + f"The metadata file at {datafile_or_dataset.metadata_path!r} is incorrectly formatted so no " + f"metadata can be read from it. Please fix or delete it." ) local_metadata = {} - _overwrite_cache_entry(absolute_path, local_metadata) + _overwrite_cache_entry(datafile_or_dataset, local_metadata) return local_metadata -def overwrite_local_metadata_file(data, path=METADATA_FILENAME): - """Create or overwrite the given local metadata file with the given data. If the data to overwrite the file with is - the same as the file's cache entry, no changes are made. If it's not, the cache entry is updated and the file is - overwritten. +def overwrite_local_metadata_file(data, datafile_or_dataset): + """Create or overwrite the local metadata file of a datafile or dataset with the given data. If the data to + overwrite the file with is the same as the file's cache entry, no changes are made. If it's not, the cache entry is + updated and the file is overwritten. :param dict data: the data to overwrite the local metadata file with - :param str path: the path to the local metadata file + :param octue.resources.datafile.Datafile|octue.resources.dataset.Dataset datafile_or_dataset: the datafile or dataset to overwrite the local metadata file for :return None: """ - absolute_path = _get_absolute_path(path) - cached_metadata = _get_metadata_from_cache(absolute_path) + cached_metadata = _get_metadata_from_cache(datafile_or_dataset) if data == cached_metadata: logger.debug("Avoiding overwriting local metadata file - its data is already in sync with the cache.") return - _overwrite_cache_entry(absolute_path, data) + _overwrite_cache_entry(datafile_or_dataset, data) - with open(path, "w") as f: + with open(datafile_or_dataset.metadata_path, "w") as f: json.dump(data, f, cls=OctueJSONEncoder, indent=4) f.write("\n") -def _get_absolute_path(path): - """Get the file's absolute path. If the file doesn't exist, create it initialised with an empty JSON object first. - This method overcomes the `FileNotFoundError` sometimes raised `os.path.abspath` for a non-existent path (this seems - like a bug, but it's unclear). +def _get_metadata_from_cache(datafile_or_dataset): + """Get metadata for a datafile or dataset from the cache. If it's not cached, return `None`. - :param str path: a path to a file - :return str: the absolute path of the file - """ - try: - return os.path.abspath(path) - except FileNotFoundError: - - # Make the directories above the path if `os.path.dirname` doesn't return an empty string. 
- if os.path.dirname(path): - os.makedirs(os.path.dirname(path), exist_ok=True) - - with open(path, "w") as f: - json.dump({}, f) - - return os.path.abspath(path) - - -def _get_metadata_from_cache(absolute_path): - """Get the metadata for the given local metadata file from the cache. If it's not cached, return `None`. - - :param str absolute_path: the path to the local metadata file + :param octue.resources.datafile.Datafile|octue.resources.dataset.Dataset datafile_or_dataset: the datafile or dataset to get metadata from the cache for :return dict|None: the metadata or, if the file hasn't been cached, `None` """ - logger.debug("Using cached local metadata.") - return copy.deepcopy(cached_local_metadata_files.get(absolute_path)) + logger.debug("Using cached local metadata for %r.", datafile_or_dataset) + return copy.deepcopy(cached_local_metadata_files.get(datafile_or_dataset.id)) -def _overwrite_cache_entry(absolute_path, data): - """Overwrite the metadata cache entry for the given local metadata file. +def _overwrite_cache_entry(datafile_or_dataset, data): + """Overwrite the metadata cache entry for a datafile or dataset. - :param str absolute_path: the path to the local metadata file + :param octue.resources.datafile.Datafile|octue.resources.dataset.Dataset datafile_or_dataset: the datafile or dataset to overwrite metadata in the cache for :param dict data: the data to overwrite the existing cache entry with. :return None: """ - cached_local_metadata_files[absolute_path] = copy.deepcopy(data) - logger.debug("Updated local metadata cache.") + cached_local_metadata_files[datafile_or_dataset.id] = copy.deepcopy(data) + logger.debug("Updated local metadata cache for %r.", datafile_or_dataset) diff --git a/poetry.lock b/poetry.lock index 5a5b80a65..e0fe15b70 100644 --- a/poetry.lock +++ b/poetry.lock @@ -248,13 +248,13 @@ files = [ [[package]] name = "certifi" -version = "2024.2.2" +version = "2024.7.4" description = "Python package for providing Mozilla's CA Bundle." 
optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, - {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, + {file = "certifi-2024.7.4-py3-none-any.whl", hash = "sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90"}, + {file = "certifi-2024.7.4.tar.gz", hash = "sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b"}, ] [[package]] @@ -469,18 +469,21 @@ toml = ["toml"] [[package]] name = "cssutils" -version = "2.10.2" +version = "2.11.1" description = "A CSS Cascading Style Sheets library for Python" optional = false python-versions = ">=3.8" files = [ - {file = "cssutils-2.10.2-py3-none-any.whl", hash = "sha256:4ad7d2f29270b22cf199f65a6b5e795f2c3130f3b9fb50c3d45e5054ef86e41a"}, - {file = "cssutils-2.10.2.tar.gz", hash = "sha256:93cf92a350b1c123b17feff042e212f94d960975a3ed145743d84ebe8ccec7ab"}, + {file = "cssutils-2.11.1-py3-none-any.whl", hash = "sha256:a67bfdfdff4f3867fab43698ec4897c1a828eca5973f4073321b3bccaf1199b1"}, + {file = "cssutils-2.11.1.tar.gz", hash = "sha256:0563a76513b6af6eebbe788c3bf3d01c920e46b3f90c8416738c5cfc773ff8e2"}, ] +[package.dependencies] +more-itertools = "*" + [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["cssselect", "importlib-resources", "jaraco.test (>=5.1)", "lxml", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-ruff (>=0.2.1)"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +test = ["cssselect", "importlib-resources", "jaraco.test (>=5.1)", "lxml", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-ruff (>=0.2.1)"] [[package]] name = "dateparser" @@ -504,23 +507,6 @@ calendars = ["convertdate", "convertdate", "hijri-converter"] fasttext = ["fasttext"] langdetect = ["langdetect"] -[[package]] -name = "db-dtypes" -version = "1.2.0" -description = "Pandas Data Types for SQL systems (BigQuery, Spanner)" -optional = true -python-versions = ">=3.7" -files = [ - {file = "db-dtypes-1.2.0.tar.gz", hash = "sha256:3531bb1fb8b5fbab33121fe243ccc2ade16ab2524f4c113b05cc702a1908e6ea"}, - {file = "db_dtypes-1.2.0-py2.py3-none-any.whl", hash = "sha256:6320bddd31d096447ef749224d64aab00972ed20e4392d86f7d8b81ad79f7ff0"}, -] - -[package.dependencies] -numpy = ">=1.16.6" -packaging = ">=17.0" -pandas = ">=0.24.2" -pyarrow = ">=3.0.0" - [[package]] name = "dict2css" version = "0.3.0.post1" @@ -549,28 +535,29 @@ files = [ [[package]] name = "docutils" -version = "0.17.1" +version = "0.18.1" description = "Docutils -- Python Documentation Utilities" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ - {file = "docutils-0.17.1-py2.py3-none-any.whl", hash = "sha256:cf316c8370a737a022b72b56874f6602acf974a37a9fba42ec2876387549fc61"}, - {file = "docutils-0.17.1.tar.gz", hash = "sha256:686577d2e4c32380bb50cbb22f575ed742d58168cee37e99117a854bcd88f125"}, + {file = "docutils-0.18.1-py2.py3-none-any.whl", hash = "sha256:23010f129180089fbcd3bc08cfefccb3b890b0050e1ca00c867036e9d161b98c"}, + {file = "docutils-0.18.1.tar.gz", hash = "sha256:679987caf361a7539d76e584cbeddc311e3aee937877c87346f31debc63e9d06"}, ] 
[[package]] name = "domdf-python-tools" -version = "3.8.0.post2" +version = "3.9.0" description = "Helpful functions for Python 🐍 🛠️" optional = false python-versions = ">=3.6" files = [ - {file = "domdf_python_tools-3.8.0.post2-py3-none-any.whl", hash = "sha256:ad2c763c8d00850a7fa92ad95e9891a1918281ea25322c4dbb1734fd32f905dd"}, - {file = "domdf_python_tools-3.8.0.post2.tar.gz", hash = "sha256:a1fd255ea29f767b08de462d2da39d360262304389227d980bc307ee8aa3366a"}, + {file = "domdf_python_tools-3.9.0-py3-none-any.whl", hash = "sha256:4e1ef365cbc24627d6d1e90cf7d46d8ab8df967e1237f4a26885f6986c78872e"}, + {file = "domdf_python_tools-3.9.0.tar.gz", hash = "sha256:1f8a96971178333a55e083e35610d7688cd7620ad2b99790164e1fc1a3614c18"}, ] [package.dependencies] importlib-metadata = {version = ">=3.6.0", markers = "python_version < \"3.9\""} +importlib-resources = {version = ">=3.0.0", markers = "python_version < \"3.9\""} natsort = ">=7.0.1" typing-extensions = ">=3.7.4.1" @@ -580,13 +567,13 @@ dates = ["pytz (>=2019.1)"] [[package]] name = "exceptiongroup" -version = "1.2.1" +version = "1.2.2" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" files = [ - {file = "exceptiongroup-1.2.1-py3-none-any.whl", hash = "sha256:5258b9ed329c5bbdd31a309f53cbfb0b155341807f6ff7606a1e801a891b29ad"}, - {file = "exceptiongroup-1.2.1.tar.gz", hash = "sha256:a4785e48b045528f5bfe627b6ad554ff32def154f42372786903b7abcfe1aa16"}, + {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, + {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, ] [package.extras] @@ -594,18 +581,18 @@ test = ["pytest (>=6)"] [[package]] name = "filelock" -version = "3.14.0" +version = "3.15.4" description = "A platform independent file lock." 
optional = false python-versions = ">=3.8" files = [ - {file = "filelock-3.14.0-py3-none-any.whl", hash = "sha256:43339835842f110ca7ae60f1e1c160714c5a6afd15a2873419ab185334975c0f"}, - {file = "filelock-3.14.0.tar.gz", hash = "sha256:6ea72da3be9b8c82afd3edcf99f2fffbb5076335a5ae4d03248bb5b6c3eae78a"}, + {file = "filelock-3.15.4-py3-none-any.whl", hash = "sha256:6ca1fffae96225dab4c6eaf1c4f4f28cd2568d3ec2a44e15a08520504de468e7"}, + {file = "filelock-3.15.4.tar.gz", hash = "sha256:2207938cbc1844345cb01a5a95524dae30f0ce089eba5b00378295a17e3e90cb"}, ] [package.extras] docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] -testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-asyncio (>=0.21)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)", "virtualenv (>=20.26.2)"] typing = ["typing-extensions (>=4.8)"] [[package]] @@ -683,13 +670,13 @@ google-crc32c = "1.3.0" [[package]] name = "google-api-core" -version = "2.19.0" +version = "2.19.1" description = "Google API client core library" optional = false python-versions = ">=3.7" files = [ - {file = "google-api-core-2.19.0.tar.gz", hash = "sha256:cf1b7c2694047886d2af1128a03ae99e391108a08804f87cfd35970e49c9cd10"}, - {file = "google_api_core-2.19.0-py3-none-any.whl", hash = "sha256:8661eec4078c35428fd3f69a2c7ee29e342896b70f01d1a1cbcb334372dd6251"}, + {file = "google-api-core-2.19.1.tar.gz", hash = "sha256:f4695f1e3650b316a795108a76a1c416e6afb036199d1c1f1f110916df479ffd"}, + {file = "google_api_core-2.19.1-py3-none-any.whl", hash = "sha256:f12a9b8309b5e21d92483bbd47ce2c445861ec7d269ef6784ecc0ea8c1fa6125"}, ] [package.dependencies] @@ -704,7 +691,7 @@ grpcio-status = [ {version = ">=1.33.2,<2.0.dev0", optional = true, markers = "python_version < \"3.11\" and extra == \"grpc\""}, ] proto-plus = ">=1.22.3,<2.0.0dev" -protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0.dev0" +protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0.dev0" requests = ">=2.18.0,<3.0.0.dev0" [package.extras] @@ -714,13 +701,13 @@ grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] [[package]] name = "google-auth" -version = "2.29.0" +version = "2.32.0" description = "Google Authentication Library" optional = false python-versions = ">=3.7" files = [ - {file = "google-auth-2.29.0.tar.gz", hash = "sha256:672dff332d073227550ffc7457868ac4218d6c500b155fe6cc17d2b13602c360"}, - {file = "google_auth-2.29.0-py2.py3-none-any.whl", hash = "sha256:d452ad095688cd52bae0ad6fafe027f6a6d6f560e810fec20914e17a09526415"}, + {file = "google_auth-2.32.0-py2.py3-none-any.whl", hash = "sha256:53326ea2ebec768070a94bee4e1b9194c9646ea0c2bd72422785bd0f9abfad7b"}, + {file = "google_auth-2.32.0.tar.gz", hash = "sha256:49315be72c55a6a37d62819e3573f6b416aca00721f7e3e31a008d928bf64022"}, ] [package.dependencies] @@ -737,13 +724,13 @@ requests = ["requests (>=2.20.0,<3.0.0.dev0)"] [[package]] name = "google-cloud-bigquery" -version = "3.22.0" +version = "3.25.0" description = "Google BigQuery API client library" -optional = true +optional = false python-versions = ">=3.7" files = [ - {file 
= "google-cloud-bigquery-3.22.0.tar.gz", hash = "sha256:957591e6f948d7cb4aa0f7a8e4e47b4617cd7f0269e28a71c37953c39b6e8a4c"}, - {file = "google_cloud_bigquery-3.22.0-py2.py3-none-any.whl", hash = "sha256:80c8e31a23b68b7d3ae5d138c9a9edff69d100ee812db73a5e63c79a13a5063d"}, + {file = "google-cloud-bigquery-3.25.0.tar.gz", hash = "sha256:5b2aff3205a854481117436836ae1403f11f2594e6810a98886afd57eda28509"}, + {file = "google_cloud_bigquery-3.25.0-py2.py3-none-any.whl", hash = "sha256:7f0c371bc74d2a7fb74dacbc00ac0f90c8c2bec2289b51dd6685a275873b1ce9"}, ] [package.dependencies] @@ -786,13 +773,13 @@ grpc = ["grpcio (>=1.38.0,<2.0dev)", "grpcio-status (>=1.38.0,<2.0.dev0)"] [[package]] name = "google-cloud-pubsub" -version = "2.21.1" +version = "2.22.0" description = "Google Cloud Pub/Sub API client library" optional = false python-versions = ">=3.7" files = [ - {file = "google-cloud-pubsub-2.21.1.tar.gz", hash = "sha256:31fcf07444b7f813a616c4b650e1fbf1dc998a088fe0059a76164855ac17f05c"}, - {file = "google_cloud_pubsub-2.21.1-py2.py3-none-any.whl", hash = "sha256:55a3602ec45bc09626604d712032288a8ee3566145cb83523cff908938f69a4b"}, + {file = "google_cloud_pubsub-2.22.0-py2.py3-none-any.whl", hash = "sha256:229bf60a3835c1bb21ee36c7d4368b111097678b8ed25d3fbc5e639a1d03388d"}, + {file = "google_cloud_pubsub-2.22.0.tar.gz", hash = "sha256:a4c2b1a5ca2c0b32c8d3776c85f498266c3d79696696ea67010c857b45af17d8"}, ] [package.dependencies] @@ -805,20 +792,20 @@ proto-plus = [ {version = ">=1.22.2,<2.0.0dev", markers = "python_version >= \"3.11\""}, {version = ">=1.22.0,<2.0.0dev", markers = "python_version < \"3.11\""}, ] -protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev" +protobuf = ">=3.20.2,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0dev" [package.extras] libcst = ["libcst (>=0.3.10)"] [[package]] name = "google-cloud-secret-manager" -version = "2.20.0" +version = "2.20.1" description = "Google Cloud Secret Manager API client library" optional = false python-versions = ">=3.7" files = [ - {file = "google-cloud-secret-manager-2.20.0.tar.gz", hash = "sha256:a086a7413aaf4fffbd1c4fe9229ef0ce9bcf48f5a8df5b449c4a32deb5a2cfde"}, - {file = "google_cloud_secret_manager-2.20.0-py2.py3-none-any.whl", hash = "sha256:c20bf22e59d220c51aa84a1db3411b14b83aa71f788fae8d273c03a4bf3e77ed"}, + {file = "google-cloud-secret-manager-2.20.1.tar.gz", hash = "sha256:91ca4f5424d80ce4f5e78deca82996ee387b8c8e060d16981690e31e3a42138b"}, + {file = "google_cloud_secret_manager-2.20.1-py2.py3-none-any.whl", hash = "sha256:56ed896d9dfc981a9c7188bbb49c17c621ef9c49fcb5aa922ee0f5969cc3a490"}, ] [package.dependencies] @@ -826,17 +813,17 @@ google-api-core = {version = ">=1.34.1,<2.0.dev0 || >=2.11.dev0,<3.0.0dev", extr google-auth = ">=2.14.1,<2.24.0 || >2.24.0,<2.25.0 || >2.25.0,<3.0.0dev" grpc-google-iam-v1 = ">=0.12.4,<1.0.0dev" proto-plus = ">=1.22.3,<2.0.0dev" -protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev" +protobuf = ">=3.20.2,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0dev" [[package]] name = "google-cloud-storage" -version = "2.16.0" +version = "2.17.0" description = "Google Cloud Storage API client library" optional = false 
python-versions = ">=3.7" files = [ - {file = "google-cloud-storage-2.16.0.tar.gz", hash = "sha256:dda485fa503710a828d01246bd16ce9db0823dc51bbca742ce96a6817d58669f"}, - {file = "google_cloud_storage-2.16.0-py2.py3-none-any.whl", hash = "sha256:91a06b96fb79cf9cdfb4e759f178ce11ea885c79938f89590344d079305f5852"}, + {file = "google-cloud-storage-2.17.0.tar.gz", hash = "sha256:49378abff54ef656b52dca5ef0f2eba9aa83dc2b2c72c78714b03a1a95fe9388"}, + {file = "google_cloud_storage-2.17.0-py2.py3-none-any.whl", hash = "sha256:5b393bc766b7a3bc6f5407b9e665b2450d36282614b7945e570b3480a456d1e1"}, ] [package.dependencies] @@ -907,13 +894,13 @@ testing = ["pytest"] [[package]] name = "google-resumable-media" -version = "2.7.0" +version = "2.7.1" description = "Utilities for Google Media Downloads and Resumable Uploads" optional = false -python-versions = ">= 3.7" +python-versions = ">=3.7" files = [ - {file = "google-resumable-media-2.7.0.tar.gz", hash = "sha256:5f18f5fa9836f4b083162064a1c2c98c17239bfda9ca50ad970ccf905f3e625b"}, - {file = "google_resumable_media-2.7.0-py2.py3-none-any.whl", hash = "sha256:79543cfe433b63fd81c0844b7803aba1bb8950b47bedf7d980c38fa123937e08"}, + {file = "google-resumable-media-2.7.1.tar.gz", hash = "sha256:eae451a7b2e2cdbaaa0fd2eb00cc8a1ee5e95e16b55597359cbc3d27d7d90e33"}, + {file = "google_resumable_media-2.7.1-py2.py3-none-any.whl", hash = "sha256:103ebc4ba331ab1bfdac0250f8033627a2cd7cde09e7ccff9181e31ba4315b2c"}, ] [package.dependencies] @@ -925,111 +912,111 @@ requests = ["requests (>=2.18.0,<3.0.0dev)"] [[package]] name = "googleapis-common-protos" -version = "1.63.0" +version = "1.63.2" description = "Common protobufs used in Google APIs" optional = false python-versions = ">=3.7" files = [ - {file = "googleapis-common-protos-1.63.0.tar.gz", hash = "sha256:17ad01b11d5f1d0171c06d3ba5c04c54474e883b66b949722b4938ee2694ef4e"}, - {file = "googleapis_common_protos-1.63.0-py2.py3-none-any.whl", hash = "sha256:ae45f75702f7c08b541f750854a678bd8f534a1a6bace6afe975f1d0a82d6632"}, + {file = "googleapis-common-protos-1.63.2.tar.gz", hash = "sha256:27c5abdffc4911f28101e635de1533fb4cfd2c37fbaa9174587c799fac90aa87"}, + {file = "googleapis_common_protos-1.63.2-py2.py3-none-any.whl", hash = "sha256:27a2499c7e8aff199665b22741997e485eccc8645aa9176c7c988e6fae507945"}, ] [package.dependencies] grpcio = {version = ">=1.44.0,<2.0.0.dev0", optional = true, markers = "extra == \"grpc\""} -protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0.dev0" +protobuf = ">=3.20.2,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0.dev0" [package.extras] grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"] [[package]] name = "grpc-google-iam-v1" -version = "0.13.0" +version = "0.13.1" description = "IAM API client library" optional = false python-versions = ">=3.7" files = [ - {file = "grpc-google-iam-v1-0.13.0.tar.gz", hash = "sha256:fad318608b9e093258fbf12529180f400d1c44453698a33509cc6ecf005b294e"}, - {file = "grpc_google_iam_v1-0.13.0-py2.py3-none-any.whl", hash = "sha256:53902e2af7de8df8c1bd91373d9be55b0743ec267a7428ea638db3775becae89"}, + {file = "grpc-google-iam-v1-0.13.1.tar.gz", hash = "sha256:3ff4b2fd9d990965e410965253c0da6f66205d5a8291c4c31c6ebecca18a9001"}, + {file = "grpc_google_iam_v1-0.13.1-py2.py3-none-any.whl", hash = "sha256:c3e86151a981811f30d5e7330f271cee53e73bb87755e88cc3b6f0c7b5fe374e"}, ] [package.dependencies] googleapis-common-protos = {version 
= ">=1.56.0,<2.0.0dev", extras = ["grpc"]} grpcio = ">=1.44.0,<2.0.0dev" -protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev" +protobuf = ">=3.20.2,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0dev" [[package]] name = "grpcio" -version = "1.63.0" +version = "1.64.1" description = "HTTP/2-based RPC framework" optional = false python-versions = ">=3.8" files = [ - {file = "grpcio-1.63.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:2e93aca840c29d4ab5db93f94ed0a0ca899e241f2e8aec6334ab3575dc46125c"}, - {file = "grpcio-1.63.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:91b73d3f1340fefa1e1716c8c1ec9930c676d6b10a3513ab6c26004cb02d8b3f"}, - {file = "grpcio-1.63.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:b3afbd9d6827fa6f475a4f91db55e441113f6d3eb9b7ebb8fb806e5bb6d6bd0d"}, - {file = "grpcio-1.63.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8f3f6883ce54a7a5f47db43289a0a4c776487912de1a0e2cc83fdaec9685cc9f"}, - {file = "grpcio-1.63.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cf8dae9cc0412cb86c8de5a8f3be395c5119a370f3ce2e69c8b7d46bb9872c8d"}, - {file = "grpcio-1.63.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:08e1559fd3b3b4468486b26b0af64a3904a8dbc78d8d936af9c1cf9636eb3e8b"}, - {file = "grpcio-1.63.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5c039ef01516039fa39da8a8a43a95b64e288f79f42a17e6c2904a02a319b357"}, - {file = "grpcio-1.63.0-cp310-cp310-win32.whl", hash = "sha256:ad2ac8903b2eae071055a927ef74121ed52d69468e91d9bcbd028bd0e554be6d"}, - {file = "grpcio-1.63.0-cp310-cp310-win_amd64.whl", hash = "sha256:b2e44f59316716532a993ca2966636df6fbe7be4ab6f099de6815570ebe4383a"}, - {file = "grpcio-1.63.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:f28f8b2db7b86c77916829d64ab21ff49a9d8289ea1564a2b2a3a8ed9ffcccd3"}, - {file = "grpcio-1.63.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:65bf975639a1f93bee63ca60d2e4951f1b543f498d581869922910a476ead2f5"}, - {file = "grpcio-1.63.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:b5194775fec7dc3dbd6a935102bb156cd2c35efe1685b0a46c67b927c74f0cfb"}, - {file = "grpcio-1.63.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e4cbb2100ee46d024c45920d16e888ee5d3cf47c66e316210bc236d5bebc42b3"}, - {file = "grpcio-1.63.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ff737cf29b5b801619f10e59b581869e32f400159e8b12d7a97e7e3bdeee6a2"}, - {file = "grpcio-1.63.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:cd1e68776262dd44dedd7381b1a0ad09d9930ffb405f737d64f505eb7f77d6c7"}, - {file = "grpcio-1.63.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:93f45f27f516548e23e4ec3fbab21b060416007dbe768a111fc4611464cc773f"}, - {file = "grpcio-1.63.0-cp311-cp311-win32.whl", hash = "sha256:878b1d88d0137df60e6b09b74cdb73db123f9579232c8456f53e9abc4f62eb3c"}, - {file = "grpcio-1.63.0-cp311-cp311-win_amd64.whl", hash = "sha256:756fed02dacd24e8f488f295a913f250b56b98fb793f41d5b2de6c44fb762434"}, - {file = "grpcio-1.63.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:93a46794cc96c3a674cdfb59ef9ce84d46185fe9421baf2268ccb556f8f81f57"}, - {file = "grpcio-1.63.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:a7b19dfc74d0be7032ca1eda0ed545e582ee46cd65c162f9e9fc6b26ef827dc6"}, - {file = "grpcio-1.63.0-cp312-cp312-manylinux_2_17_aarch64.whl", hash = 
"sha256:8064d986d3a64ba21e498b9a376cbc5d6ab2e8ab0e288d39f266f0fca169b90d"}, - {file = "grpcio-1.63.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:219bb1848cd2c90348c79ed0a6b0ea51866bc7e72fa6e205e459fedab5770172"}, - {file = "grpcio-1.63.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2d60cd1d58817bc5985fae6168d8b5655c4981d448d0f5b6194bbcc038090d2"}, - {file = "grpcio-1.63.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:9e350cb096e5c67832e9b6e018cf8a0d2a53b2a958f6251615173165269a91b0"}, - {file = "grpcio-1.63.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:56cdf96ff82e3cc90dbe8bac260352993f23e8e256e063c327b6cf9c88daf7a9"}, - {file = "grpcio-1.63.0-cp312-cp312-win32.whl", hash = "sha256:3a6d1f9ea965e750db7b4ee6f9fdef5fdf135abe8a249e75d84b0a3e0c668a1b"}, - {file = "grpcio-1.63.0-cp312-cp312-win_amd64.whl", hash = "sha256:d2497769895bb03efe3187fb1888fc20e98a5f18b3d14b606167dacda5789434"}, - {file = "grpcio-1.63.0-cp38-cp38-linux_armv7l.whl", hash = "sha256:fdf348ae69c6ff484402cfdb14e18c1b0054ac2420079d575c53a60b9b2853ae"}, - {file = "grpcio-1.63.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:a3abfe0b0f6798dedd2e9e92e881d9acd0fdb62ae27dcbbfa7654a57e24060c0"}, - {file = "grpcio-1.63.0-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:6ef0ad92873672a2a3767cb827b64741c363ebaa27e7f21659e4e31f4d750280"}, - {file = "grpcio-1.63.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b416252ac5588d9dfb8a30a191451adbf534e9ce5f56bb02cd193f12d8845b7f"}, - {file = "grpcio-1.63.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3b77eaefc74d7eb861d3ffbdf91b50a1bb1639514ebe764c47773b833fa2d91"}, - {file = "grpcio-1.63.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:b005292369d9c1f80bf70c1db1c17c6c342da7576f1c689e8eee4fb0c256af85"}, - {file = "grpcio-1.63.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:cdcda1156dcc41e042d1e899ba1f5c2e9f3cd7625b3d6ebfa619806a4c1aadda"}, - {file = "grpcio-1.63.0-cp38-cp38-win32.whl", hash = "sha256:01799e8649f9e94ba7db1aeb3452188048b0019dc37696b0f5ce212c87c560c3"}, - {file = "grpcio-1.63.0-cp38-cp38-win_amd64.whl", hash = "sha256:6a1a3642d76f887aa4009d92f71eb37809abceb3b7b5a1eec9c554a246f20e3a"}, - {file = "grpcio-1.63.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:75f701ff645858a2b16bc8c9fc68af215a8bb2d5a9b647448129de6e85d52bce"}, - {file = "grpcio-1.63.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:cacdef0348a08e475a721967f48206a2254a1b26ee7637638d9e081761a5ba86"}, - {file = "grpcio-1.63.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:0697563d1d84d6985e40ec5ec596ff41b52abb3fd91ec240e8cb44a63b895094"}, - {file = "grpcio-1.63.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6426e1fb92d006e47476d42b8f240c1d916a6d4423c5258ccc5b105e43438f61"}, - {file = "grpcio-1.63.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e48cee31bc5f5a31fb2f3b573764bd563aaa5472342860edcc7039525b53e46a"}, - {file = "grpcio-1.63.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:50344663068041b34a992c19c600236e7abb42d6ec32567916b87b4c8b8833b3"}, - {file = "grpcio-1.63.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:259e11932230d70ef24a21b9fb5bb947eb4703f57865a404054400ee92f42f5d"}, - {file = "grpcio-1.63.0-cp39-cp39-win32.whl", hash = "sha256:a44624aad77bf8ca198c55af811fd28f2b3eaf0a50ec5b57b06c034416ef2d0a"}, - {file = "grpcio-1.63.0-cp39-cp39-win_amd64.whl", hash = 
"sha256:166e5c460e5d7d4656ff9e63b13e1f6029b122104c1633d5f37eaea348d7356d"}, - {file = "grpcio-1.63.0.tar.gz", hash = "sha256:f3023e14805c61bc439fb40ca545ac3d5740ce66120a678a3c6c2c55b70343d1"}, + {file = "grpcio-1.64.1-cp310-cp310-linux_armv7l.whl", hash = "sha256:55697ecec192bc3f2f3cc13a295ab670f51de29884ca9ae6cd6247df55df2502"}, + {file = "grpcio-1.64.1-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:3b64ae304c175671efdaa7ec9ae2cc36996b681eb63ca39c464958396697daff"}, + {file = "grpcio-1.64.1-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:bac71b4b28bc9af61efcdc7630b166440bbfbaa80940c9a697271b5e1dabbc61"}, + {file = "grpcio-1.64.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6c024ffc22d6dc59000faf8ad781696d81e8e38f4078cb0f2630b4a3cf231a90"}, + {file = "grpcio-1.64.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e7cd5c1325f6808b8ae31657d281aadb2a51ac11ab081ae335f4f7fc44c1721d"}, + {file = "grpcio-1.64.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:0a2813093ddb27418a4c99f9b1c223fab0b053157176a64cc9db0f4557b69bd9"}, + {file = "grpcio-1.64.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2981c7365a9353f9b5c864595c510c983251b1ab403e05b1ccc70a3d9541a73b"}, + {file = "grpcio-1.64.1-cp310-cp310-win32.whl", hash = "sha256:1262402af5a511c245c3ae918167eca57342c72320dffae5d9b51840c4b2f86d"}, + {file = "grpcio-1.64.1-cp310-cp310-win_amd64.whl", hash = "sha256:19264fc964576ddb065368cae953f8d0514ecc6cb3da8903766d9fb9d4554c33"}, + {file = "grpcio-1.64.1-cp311-cp311-linux_armv7l.whl", hash = "sha256:58b1041e7c870bb30ee41d3090cbd6f0851f30ae4eb68228955d973d3efa2e61"}, + {file = "grpcio-1.64.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:bbc5b1d78a7822b0a84c6f8917faa986c1a744e65d762ef6d8be9d75677af2ca"}, + {file = "grpcio-1.64.1-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:5841dd1f284bd1b3d8a6eca3a7f062b06f1eec09b184397e1d1d43447e89a7ae"}, + {file = "grpcio-1.64.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8caee47e970b92b3dd948371230fcceb80d3f2277b3bf7fbd7c0564e7d39068e"}, + {file = "grpcio-1.64.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73819689c169417a4f978e562d24f2def2be75739c4bed1992435d007819da1b"}, + {file = "grpcio-1.64.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6503b64c8b2dfad299749cad1b595c650c91e5b2c8a1b775380fcf8d2cbba1e9"}, + {file = "grpcio-1.64.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1de403fc1305fd96cfa75e83be3dee8538f2413a6b1685b8452301c7ba33c294"}, + {file = "grpcio-1.64.1-cp311-cp311-win32.whl", hash = "sha256:d4d29cc612e1332237877dfa7fe687157973aab1d63bd0f84cf06692f04c0367"}, + {file = "grpcio-1.64.1-cp311-cp311-win_amd64.whl", hash = "sha256:5e56462b05a6f860b72f0fa50dca06d5b26543a4e88d0396259a07dc30f4e5aa"}, + {file = "grpcio-1.64.1-cp312-cp312-linux_armv7l.whl", hash = "sha256:4657d24c8063e6095f850b68f2d1ba3b39f2b287a38242dcabc166453e950c59"}, + {file = "grpcio-1.64.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:62b4e6eb7bf901719fce0ca83e3ed474ae5022bb3827b0a501e056458c51c0a1"}, + {file = "grpcio-1.64.1-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:ee73a2f5ca4ba44fa33b4d7d2c71e2c8a9e9f78d53f6507ad68e7d2ad5f64a22"}, + {file = "grpcio-1.64.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:198908f9b22e2672a998870355e226a725aeab327ac4e6ff3a1399792ece4762"}, + {file = "grpcio-1.64.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:39b9d0acaa8d835a6566c640f48b50054f422d03e77e49716d4c4e8e279665a1"}, + {file = "grpcio-1.64.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:5e42634a989c3aa6049f132266faf6b949ec2a6f7d302dbb5c15395b77d757eb"}, + {file = "grpcio-1.64.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:b1a82e0b9b3022799c336e1fc0f6210adc019ae84efb7321d668129d28ee1efb"}, + {file = "grpcio-1.64.1-cp312-cp312-win32.whl", hash = "sha256:55260032b95c49bee69a423c2f5365baa9369d2f7d233e933564d8a47b893027"}, + {file = "grpcio-1.64.1-cp312-cp312-win_amd64.whl", hash = "sha256:c1a786ac592b47573a5bb7e35665c08064a5d77ab88a076eec11f8ae86b3e3f6"}, + {file = "grpcio-1.64.1-cp38-cp38-linux_armv7l.whl", hash = "sha256:a011ac6c03cfe162ff2b727bcb530567826cec85eb8d4ad2bfb4bd023287a52d"}, + {file = "grpcio-1.64.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:4d6dab6124225496010bd22690f2d9bd35c7cbb267b3f14e7a3eb05c911325d4"}, + {file = "grpcio-1.64.1-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:a5e771d0252e871ce194d0fdcafd13971f1aae0ddacc5f25615030d5df55c3a2"}, + {file = "grpcio-1.64.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2c3c1b90ab93fed424e454e93c0ed0b9d552bdf1b0929712b094f5ecfe7a23ad"}, + {file = "grpcio-1.64.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20405cb8b13fd779135df23fabadc53b86522d0f1cba8cca0e87968587f50650"}, + {file = "grpcio-1.64.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:0cc79c982ccb2feec8aad0e8fb0d168bcbca85bc77b080d0d3c5f2f15c24ea8f"}, + {file = "grpcio-1.64.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:a3a035c37ce7565b8f4f35ff683a4db34d24e53dc487e47438e434eb3f701b2a"}, + {file = "grpcio-1.64.1-cp38-cp38-win32.whl", hash = "sha256:1257b76748612aca0f89beec7fa0615727fd6f2a1ad580a9638816a4b2eb18fd"}, + {file = "grpcio-1.64.1-cp38-cp38-win_amd64.whl", hash = "sha256:0a12ddb1678ebc6a84ec6b0487feac020ee2b1659cbe69b80f06dbffdb249122"}, + {file = "grpcio-1.64.1-cp39-cp39-linux_armv7l.whl", hash = "sha256:75dbbf415026d2862192fe1b28d71f209e2fd87079d98470db90bebe57b33179"}, + {file = "grpcio-1.64.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e3d9f8d1221baa0ced7ec7322a981e28deb23749c76eeeb3d33e18b72935ab62"}, + {file = "grpcio-1.64.1-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:5f8b75f64d5d324c565b263c67dbe4f0af595635bbdd93bb1a88189fc62ed2e5"}, + {file = "grpcio-1.64.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c84ad903d0d94311a2b7eea608da163dace97c5fe9412ea311e72c3684925602"}, + {file = "grpcio-1.64.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:940e3ec884520155f68a3b712d045e077d61c520a195d1a5932c531f11883489"}, + {file = "grpcio-1.64.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f10193c69fc9d3d726e83bbf0f3d316f1847c3071c8c93d8090cf5f326b14309"}, + {file = "grpcio-1.64.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ac15b6c2c80a4d1338b04d42a02d376a53395ddf0ec9ab157cbaf44191f3ffdd"}, + {file = "grpcio-1.64.1-cp39-cp39-win32.whl", hash = "sha256:03b43d0ccf99c557ec671c7dede64f023c7da9bb632ac65dbc57f166e4970040"}, + {file = "grpcio-1.64.1-cp39-cp39-win_amd64.whl", hash = "sha256:ed6091fa0adcc7e4ff944090cf203a52da35c37a130efa564ded02b7aff63bcd"}, + {file = "grpcio-1.64.1.tar.gz", hash = "sha256:8d51dd1c59d5fa0f34266b80a3805ec29a1f26425c2a54736133f6d87fc4968a"}, ] [package.extras] -protobuf = ["grpcio-tools (>=1.63.0)"] +protobuf = ["grpcio-tools (>=1.64.1)"] [[package]] name = "grpcio-status" -version = "1.62.2" +version = "1.64.1" 
description = "Status proto mapping for gRPC" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "grpcio-status-1.62.2.tar.gz", hash = "sha256:62e1bfcb02025a1cd73732a2d33672d3e9d0df4d21c12c51e0bbcaf09bab742a"}, - {file = "grpcio_status-1.62.2-py3-none-any.whl", hash = "sha256:206ddf0eb36bc99b033f03b2c8e95d319f0044defae9b41ae21408e7e0cda48f"}, + {file = "grpcio_status-1.64.1-py3-none-any.whl", hash = "sha256:2ec6e0777958831484a517e32b6ffe0a4272242eae81bff2f5c3707fa58b40e3"}, + {file = "grpcio_status-1.64.1.tar.gz", hash = "sha256:c50bd14eb6506d8580a6c553bea463d7c08499b2c0e93f6d1864c5e8eabb1066"}, ] [package.dependencies] googleapis-common-protos = ">=1.5.5" -grpcio = ">=1.62.2" -protobuf = ">=4.21.6" +grpcio = ">=1.64.1" +protobuf = ">=5.26.1,<6.0dev" [[package]] name = "gunicorn" @@ -1108,13 +1095,13 @@ lxml = ["lxml"] [[package]] name = "identify" -version = "2.5.36" +version = "2.6.0" description = "File identification library for Python" optional = false python-versions = ">=3.8" files = [ - {file = "identify-2.5.36-py2.py3-none-any.whl", hash = "sha256:37d93f380f4de590500d9dba7db359d0d3da95ffe7f9de1753faa159e71e7dfa"}, - {file = "identify-2.5.36.tar.gz", hash = "sha256:e5e00f54165f9047fbebeb4a560f9acfb8af4c88232be60a488e9b68d122745d"}, + {file = "identify-2.6.0-py2.py3-none-any.whl", hash = "sha256:e79ae4406387a9d300332b5fd366d8994f1525e8414984e1a59e058b2eda2dd0"}, + {file = "identify-2.6.0.tar.gz", hash = "sha256:cb171c685bdc31bcc4c1734698736a7d5b6c8bf2e0c15117f4d469c8640ae5cf"}, ] [package.extras] @@ -1144,22 +1131,22 @@ files = [ [[package]] name = "importlib-metadata" -version = "7.1.0" +version = "8.0.0" description = "Read metadata from Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "importlib_metadata-7.1.0-py3-none-any.whl", hash = "sha256:30962b96c0c223483ed6cc7280e7f0199feb01a0e40cfae4d4450fc6fab1f570"}, - {file = "importlib_metadata-7.1.0.tar.gz", hash = "sha256:b78938b926ee8d5f020fc4772d487045805a55ddbad2ecf21c6d60938dc7fcd2"}, + {file = "importlib_metadata-8.0.0-py3-none-any.whl", hash = "sha256:15584cf2b1bf449d98ff8a6ff1abef57bf20f3ac6454f431736cd3e660921b2f"}, + {file = "importlib_metadata-8.0.0.tar.gz", hash = "sha256:188bd24e4c346d3f0a933f275c2fec67050326a856b9a359881d7c2a697e8812"}, ] [package.dependencies] zipp = ">=0.5" [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] perf = ["ipython"] -testing = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"] +test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"] [[package]] name = "importlib-resources" @@ -1237,13 +1224,13 @@ i18n = ["Babel (>=2.7)"] [[package]] name = "jsonschema" -version = "4.22.0" +version = "4.23.0" description = "An implementation of JSON Schema validation for Python" optional = false python-versions = ">=3.8" files = [ - {file = "jsonschema-4.22.0-py3-none-any.whl", hash = 
"sha256:ff4cfd6b1367a40e7bc6411caec72effadd3db0bbe5017de188f2d6108335802"}, - {file = "jsonschema-4.22.0.tar.gz", hash = "sha256:5b22d434a45935119af990552c862e5d6d564e8f6601206b305a61fdf661a2b7"}, + {file = "jsonschema-4.23.0-py3-none-any.whl", hash = "sha256:fbadb6f8b144a8f8cf9f0b89ba94501d143e50411a1278633f56a7acf7fd5566"}, + {file = "jsonschema-4.23.0.tar.gz", hash = "sha256:d71497fef26351a33265337fa77ffeb82423f3ea21283cd9467bb03999266bc4"}, ] [package.dependencies] @@ -1256,7 +1243,7 @@ rpds-py = ">=0.7.1" [package.extras] format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"] -format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=1.11)"] +format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=24.6.0)"] [[package]] name = "jsonschema-specifications" @@ -1273,17 +1260,6 @@ files = [ importlib-resources = {version = ">=1.4.0", markers = "python_version < \"3.9\""} referencing = ">=0.31.0" -[[package]] -name = "lockfile" -version = "0.12.2" -description = "Platform-independent file locking module" -optional = false -python-versions = "*" -files = [ - {file = "lockfile-0.12.2-py2.py3-none-any.whl", hash = "sha256:6c3cb24f344923d30b2785d5ad75182c8ea7ac1b6171b08657258ec7429d50fa"}, - {file = "lockfile-0.12.2.tar.gz", hash = "sha256:6aed02de03cba24efabcd600b30540140634fc06cfa603822d508d5361e9f799"}, -] - [[package]] name = "markupsafe" version = "2.1.5" @@ -1364,6 +1340,17 @@ files = [ {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"}, ] +[[package]] +name = "more-itertools" +version = "10.3.0" +description = "More routines for operating on iterables, beyond itertools" +optional = false +python-versions = ">=3.8" +files = [ + {file = "more-itertools-10.3.0.tar.gz", hash = "sha256:e5d93ef411224fbcef366a6e8ddc4c5781bc6359d43412a65dd5964e46111463"}, + {file = "more_itertools-10.3.0-py3-none-any.whl", hash = "sha256:ea6a02e24a9161e51faad17a8782b92a0df82c12c1c8886fec7f0c3fa1a1b320"}, +] + [[package]] name = "msgpack" version = "1.0.8" @@ -1458,18 +1445,15 @@ icu = ["PyICU (>=1.0.0)"] [[package]] name = "nodeenv" -version = "1.8.0" +version = "1.9.1" description = "Node.js virtual environment builder" optional = false -python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ - {file = "nodeenv-1.8.0-py2.py3-none-any.whl", hash = "sha256:df865724bb3c3adc86b3876fa209771517b0cfe596beff01a92700e0e8be4cec"}, - {file = "nodeenv-1.8.0.tar.gz", hash = "sha256:d51e0c37e64fbf47d017feac3145cdbb58836d7eee8c6f6d3b6880c5456227d2"}, + {file = "nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9"}, + {file = "nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f"}, ] -[package.dependencies] -setuptools = "*" - [[package]] name = "numpy" version = "1.24.4" @@ -1509,13 +1493,13 @@ files = [ [[package]] name = "packaging" -version = "24.0" +version = "24.1" description = "Core utilities for Python packages" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "packaging-24.0-py3-none-any.whl", hash = 
"sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, - {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, + {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"}, + {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, ] [[package]] @@ -1590,13 +1574,13 @@ files = [ [[package]] name = "platformdirs" -version = "4.2.1" +version = "4.2.2" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." optional = false python-versions = ">=3.8" files = [ - {file = "platformdirs-4.2.1-py3-none-any.whl", hash = "sha256:17d5a1161b3fd67b390023cb2d3b026bbd40abde6fdb052dfbd3a29c3ba22ee1"}, - {file = "platformdirs-4.2.1.tar.gz", hash = "sha256:031cd18d4ec63ec53e82dceaac0417d218a6863f7745dfcc9efe7793b7039bdf"}, + {file = "platformdirs-4.2.2-py3-none-any.whl", hash = "sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee"}, + {file = "platformdirs-4.2.2.tar.gz", hash = "sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3"}, ] [package.extras] @@ -1639,39 +1623,39 @@ virtualenv = ">=20.10.0" [[package]] name = "proto-plus" -version = "1.23.0" +version = "1.24.0" description = "Beautiful, Pythonic protocol buffers." optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "proto-plus-1.23.0.tar.gz", hash = "sha256:89075171ef11988b3fa157f5dbd8b9cf09d65fffee97e29ce403cd8defba19d2"}, - {file = "proto_plus-1.23.0-py3-none-any.whl", hash = "sha256:a829c79e619e1cf632de091013a4173deed13a55f326ef84f05af6f50ff4c82c"}, + {file = "proto-plus-1.24.0.tar.gz", hash = "sha256:30b72a5ecafe4406b0d339db35b56c4059064e69227b8c3bda7462397f966445"}, + {file = "proto_plus-1.24.0-py3-none-any.whl", hash = "sha256:402576830425e5f6ce4c2a6702400ac79897dab0b4343821aa5188b0fab81a12"}, ] [package.dependencies] -protobuf = ">=3.19.0,<5.0.0dev" +protobuf = ">=3.19.0,<6.0.0dev" [package.extras] -testing = ["google-api-core[grpc] (>=1.31.5)"] +testing = ["google-api-core (>=1.31.5)"] [[package]] name = "protobuf" -version = "4.25.3" +version = "5.27.2" description = "" optional = false python-versions = ">=3.8" files = [ - {file = "protobuf-4.25.3-cp310-abi3-win32.whl", hash = "sha256:d4198877797a83cbfe9bffa3803602bbe1625dc30d8a097365dbc762e5790faa"}, - {file = "protobuf-4.25.3-cp310-abi3-win_amd64.whl", hash = "sha256:209ba4cc916bab46f64e56b85b090607a676f66b473e6b762e6f1d9d591eb2e8"}, - {file = "protobuf-4.25.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:f1279ab38ecbfae7e456a108c5c0681e4956d5b1090027c1de0f934dfdb4b35c"}, - {file = "protobuf-4.25.3-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:e7cb0ae90dd83727f0c0718634ed56837bfeeee29a5f82a7514c03ee1364c019"}, - {file = "protobuf-4.25.3-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:7c8daa26095f82482307bc717364e7c13f4f1c99659be82890dcfc215194554d"}, - {file = "protobuf-4.25.3-cp38-cp38-win32.whl", hash = "sha256:f4f118245c4a087776e0a8408be33cf09f6c547442c00395fbfb116fac2f8ac2"}, - {file = "protobuf-4.25.3-cp38-cp38-win_amd64.whl", hash = "sha256:c053062984e61144385022e53678fbded7aea14ebb3e0305ae3592fb219ccfa4"}, - {file = "protobuf-4.25.3-cp39-cp39-win32.whl", hash = "sha256:19b270aeaa0099f16d3ca02628546b8baefe2955bbe23224aaf856134eccf1e4"}, - {file = "protobuf-4.25.3-cp39-cp39-win_amd64.whl", hash = 
"sha256:e3c97a1555fd6388f857770ff8b9703083de6bf1f9274a002a332d65fbb56c8c"}, - {file = "protobuf-4.25.3-py3-none-any.whl", hash = "sha256:f0700d54bcf45424477e46a9f0944155b46fb0639d69728739c0e47bab83f2b9"}, - {file = "protobuf-4.25.3.tar.gz", hash = "sha256:25b5d0b42fd000320bd7830b349e3b696435f3b329810427a6bcce6a5492cc5c"}, + {file = "protobuf-5.27.2-cp310-abi3-win32.whl", hash = "sha256:354d84fac2b0d76062e9b3221f4abbbacdfd2a4d8af36bab0474f3a0bb30ab38"}, + {file = "protobuf-5.27.2-cp310-abi3-win_amd64.whl", hash = "sha256:0e341109c609749d501986b835f667c6e1e24531096cff9d34ae411595e26505"}, + {file = "protobuf-5.27.2-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:a109916aaac42bff84702fb5187f3edadbc7c97fc2c99c5ff81dd15dcce0d1e5"}, + {file = "protobuf-5.27.2-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:176c12b1f1c880bf7a76d9f7c75822b6a2bc3db2d28baa4d300e8ce4cde7409b"}, + {file = "protobuf-5.27.2-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:b848dbe1d57ed7c191dfc4ea64b8b004a3f9ece4bf4d0d80a367b76df20bf36e"}, + {file = "protobuf-5.27.2-cp38-cp38-win32.whl", hash = "sha256:4fadd8d83e1992eed0248bc50a4a6361dc31bcccc84388c54c86e530b7f58863"}, + {file = "protobuf-5.27.2-cp38-cp38-win_amd64.whl", hash = "sha256:610e700f02469c4a997e58e328cac6f305f649826853813177e6290416e846c6"}, + {file = "protobuf-5.27.2-cp39-cp39-win32.whl", hash = "sha256:9e8f199bf7f97bd7ecebffcae45ebf9527603549b2b562df0fbc6d4d688f14ca"}, + {file = "protobuf-5.27.2-cp39-cp39-win_amd64.whl", hash = "sha256:7fc3add9e6003e026da5fc9e59b131b8f22b428b991ccd53e2af8071687b4fce"}, + {file = "protobuf-5.27.2-py3-none-any.whl", hash = "sha256:54330f07e4949d09614707c48b06d1a22f8ffb5763c159efd5c0928326a91470"}, + {file = "protobuf-5.27.2.tar.gz", hash = "sha256:f3ecdef226b9af856075f28227ff2c90ce3a594d092c39bee5513573f25e2714"}, ] [[package]] @@ -1685,54 +1669,6 @@ files = [ {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, ] -[[package]] -name = "pyarrow" -version = "16.0.0" -description = "Python library for Apache Arrow" -optional = true -python-versions = ">=3.8" -files = [ - {file = "pyarrow-16.0.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:22a1fdb1254e5095d629e29cd1ea98ed04b4bbfd8e42cc670a6b639ccc208b60"}, - {file = "pyarrow-16.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:574a00260a4ed9d118a14770edbd440b848fcae5a3024128be9d0274dbcaf858"}, - {file = "pyarrow-16.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c0815d0ddb733b8c1b53a05827a91f1b8bde6240f3b20bf9ba5d650eb9b89cdf"}, - {file = "pyarrow-16.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df0080339387b5d30de31e0a149c0c11a827a10c82f0c67d9afae3981d1aabb7"}, - {file = "pyarrow-16.0.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:edf38cce0bf0dcf726e074159c60516447e4474904c0033f018c1f33d7dac6c5"}, - {file = "pyarrow-16.0.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:91d28f9a40f1264eab2af7905a4d95320ac2f287891e9c8b0035f264fe3c3a4b"}, - {file = "pyarrow-16.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:99af421ee451a78884d7faea23816c429e263bd3618b22d38e7992c9ce2a7ad9"}, - {file = "pyarrow-16.0.0-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:d22d0941e6c7bafddf5f4c0662e46f2075850f1c044bf1a03150dd9e189427ce"}, - {file = "pyarrow-16.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:266ddb7e823f03733c15adc8b5078db2df6980f9aa93d6bb57ece615df4e0ba7"}, - {file = 
"pyarrow-16.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5cc23090224b6594f5a92d26ad47465af47c1d9c079dd4a0061ae39551889efe"}, - {file = "pyarrow-16.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56850a0afe9ef37249d5387355449c0f94d12ff7994af88f16803a26d38f2016"}, - {file = "pyarrow-16.0.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:705db70d3e2293c2f6f8e84874b5b775f690465798f66e94bb2c07bab0a6bb55"}, - {file = "pyarrow-16.0.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:5448564754c154997bc09e95a44b81b9e31ae918a86c0fcb35c4aa4922756f55"}, - {file = "pyarrow-16.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:729f7b262aa620c9df8b9967db96c1575e4cfc8c25d078a06968e527b8d6ec05"}, - {file = "pyarrow-16.0.0-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:fb8065dbc0d051bf2ae2453af0484d99a43135cadabacf0af588a3be81fbbb9b"}, - {file = "pyarrow-16.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:20ce707d9aa390593ea93218b19d0eadab56390311cb87aad32c9a869b0e958c"}, - {file = "pyarrow-16.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5823275c8addbbb50cd4e6a6839952682a33255b447277e37a6f518d6972f4e1"}, - {file = "pyarrow-16.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ab8b9050752b16a8b53fcd9853bf07d8daf19093533e990085168f40c64d978"}, - {file = "pyarrow-16.0.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:42e56557bc7c5c10d3e42c3b32f6cff649a29d637e8f4e8b311d334cc4326730"}, - {file = "pyarrow-16.0.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:2a7abdee4a4a7cfa239e2e8d721224c4b34ffe69a0ca7981354fe03c1328789b"}, - {file = "pyarrow-16.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:ef2f309b68396bcc5a354106741d333494d6a0d3e1951271849787109f0229a6"}, - {file = "pyarrow-16.0.0-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:ed66e5217b4526fa3585b5e39b0b82f501b88a10d36bd0d2a4d8aa7b5a48e2df"}, - {file = "pyarrow-16.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cc8814310486f2a73c661ba8354540f17eef51e1b6dd090b93e3419d3a097b3a"}, - {file = "pyarrow-16.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c2f5e239db7ed43e0ad2baf46a6465f89c824cc703f38ef0fde927d8e0955f7"}, - {file = "pyarrow-16.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f293e92d1db251447cb028ae12f7bc47526e4649c3a9924c8376cab4ad6b98bd"}, - {file = "pyarrow-16.0.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:dd9334a07b6dc21afe0857aa31842365a62eca664e415a3f9536e3a8bb832c07"}, - {file = "pyarrow-16.0.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:d91073d1e2fef2c121154680e2ba7e35ecf8d4969cc0af1fa6f14a8675858159"}, - {file = "pyarrow-16.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:71d52561cd7aefd22cf52538f262850b0cc9e4ec50af2aaa601da3a16ef48877"}, - {file = "pyarrow-16.0.0-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:b93c9a50b965ee0bf4fef65e53b758a7e8dcc0c2d86cebcc037aaaf1b306ecc0"}, - {file = "pyarrow-16.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d831690844706e374c455fba2fb8cfcb7b797bfe53ceda4b54334316e1ac4fa4"}, - {file = "pyarrow-16.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35692ce8ad0b8c666aa60f83950957096d92f2a9d8d7deda93fb835e6053307e"}, - {file = "pyarrow-16.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9dd3151d098e56f16a8389c1247137f9e4c22720b01c6f3aa6dec29a99b74d80"}, - {file = 
"pyarrow-16.0.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:bd40467bdb3cbaf2044ed7a6f7f251c8f941c8b31275aaaf88e746c4f3ca4a7a"}, - {file = "pyarrow-16.0.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:00a1dcb22ad4ceb8af87f7bd30cc3354788776c417f493089e0a0af981bc8d80"}, - {file = "pyarrow-16.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:fda9a7cebd1b1d46c97b511f60f73a5b766a6de4c5236f144f41a5d5afec1f35"}, - {file = "pyarrow-16.0.0.tar.gz", hash = "sha256:59bb1f1edbbf4114c72415f039f1359f1a57d166a331c3229788ccbfbb31689a"}, -] - -[package.dependencies] -numpy = ">=1.16.6" - [[package]] name = "pyasn1" version = "0.6.0" @@ -2014,13 +1950,13 @@ files = [ [[package]] name = "requests" -version = "2.31.0" +version = "2.32.3" description = "Python HTTP for Humans." optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, - {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, + {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, + {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, ] [package.dependencies] @@ -2035,110 +1971,101 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "rpds-py" -version = "0.18.1" +version = "0.19.0" description = "Python bindings to Rust's persistent data structures (rpds)" optional = false python-versions = ">=3.8" files = [ - {file = "rpds_py-0.18.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:d31dea506d718693b6b2cffc0648a8929bdc51c70a311b2770f09611caa10d53"}, - {file = "rpds_py-0.18.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:732672fbc449bab754e0b15356c077cc31566df874964d4801ab14f71951ea80"}, - {file = "rpds_py-0.18.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a98a1f0552b5f227a3d6422dbd61bc6f30db170939bd87ed14f3c339aa6c7c9"}, - {file = "rpds_py-0.18.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7f1944ce16401aad1e3f7d312247b3d5de7981f634dc9dfe90da72b87d37887d"}, - {file = "rpds_py-0.18.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:38e14fb4e370885c4ecd734f093a2225ee52dc384b86fa55fe3f74638b2cfb09"}, - {file = "rpds_py-0.18.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:08d74b184f9ab6289b87b19fe6a6d1a97fbfea84b8a3e745e87a5de3029bf944"}, - {file = "rpds_py-0.18.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d70129cef4a8d979caa37e7fe957202e7eee8ea02c5e16455bc9808a59c6b2f0"}, - {file = "rpds_py-0.18.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ce0bb20e3a11bd04461324a6a798af34d503f8d6f1aa3d2aa8901ceaf039176d"}, - {file = "rpds_py-0.18.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:81c5196a790032e0fc2464c0b4ab95f8610f96f1f2fa3d4deacce6a79852da60"}, - {file = "rpds_py-0.18.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:f3027be483868c99b4985fda802a57a67fdf30c5d9a50338d9db646d590198da"}, - {file = "rpds_py-0.18.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:d44607f98caa2961bab4fa3c4309724b185b464cdc3ba6f3d7340bac3ec97cc1"}, - {file = "rpds_py-0.18.1-cp310-none-win32.whl", hash = "sha256:c273e795e7a0f1fddd46e1e3cb8be15634c29ae8ff31c196debb620e1edb9333"}, - {file = 
"rpds_py-0.18.1-cp310-none-win_amd64.whl", hash = "sha256:8352f48d511de5f973e4f2f9412736d7dea76c69faa6d36bcf885b50c758ab9a"}, - {file = "rpds_py-0.18.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:6b5ff7e1d63a8281654b5e2896d7f08799378e594f09cf3674e832ecaf396ce8"}, - {file = "rpds_py-0.18.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8927638a4d4137a289e41d0fd631551e89fa346d6dbcfc31ad627557d03ceb6d"}, - {file = "rpds_py-0.18.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:154bf5c93d79558b44e5b50cc354aa0459e518e83677791e6adb0b039b7aa6a7"}, - {file = "rpds_py-0.18.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:07f2139741e5deb2c5154a7b9629bc5aa48c766b643c1a6750d16f865a82c5fc"}, - {file = "rpds_py-0.18.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8c7672e9fba7425f79019db9945b16e308ed8bc89348c23d955c8c0540da0a07"}, - {file = "rpds_py-0.18.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:489bdfe1abd0406eba6b3bb4fdc87c7fa40f1031de073d0cfb744634cc8fa261"}, - {file = "rpds_py-0.18.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c20f05e8e3d4fc76875fc9cb8cf24b90a63f5a1b4c5b9273f0e8225e169b100"}, - {file = "rpds_py-0.18.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:967342e045564cef76dfcf1edb700b1e20838d83b1aa02ab313e6a497cf923b8"}, - {file = "rpds_py-0.18.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2cc7c1a47f3a63282ab0f422d90ddac4aa3034e39fc66a559ab93041e6505da7"}, - {file = "rpds_py-0.18.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:f7afbfee1157e0f9376c00bb232e80a60e59ed716e3211a80cb8506550671e6e"}, - {file = "rpds_py-0.18.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9e6934d70dc50f9f8ea47081ceafdec09245fd9f6032669c3b45705dea096b88"}, - {file = "rpds_py-0.18.1-cp311-none-win32.whl", hash = "sha256:c69882964516dc143083d3795cb508e806b09fc3800fd0d4cddc1df6c36e76bb"}, - {file = "rpds_py-0.18.1-cp311-none-win_amd64.whl", hash = "sha256:70a838f7754483bcdc830444952fd89645569e7452e3226de4a613a4c1793fb2"}, - {file = "rpds_py-0.18.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:3dd3cd86e1db5aadd334e011eba4e29d37a104b403e8ca24dcd6703c68ca55b3"}, - {file = "rpds_py-0.18.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:05f3d615099bd9b13ecf2fc9cf2d839ad3f20239c678f461c753e93755d629ee"}, - {file = "rpds_py-0.18.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35b2b771b13eee8729a5049c976197ff58a27a3829c018a04341bcf1ae409b2b"}, - {file = "rpds_py-0.18.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ee17cd26b97d537af8f33635ef38be873073d516fd425e80559f4585a7b90c43"}, - {file = "rpds_py-0.18.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b646bf655b135ccf4522ed43d6902af37d3f5dbcf0da66c769a2b3938b9d8184"}, - {file = "rpds_py-0.18.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:19ba472b9606c36716062c023afa2484d1e4220548751bda14f725a7de17b4f6"}, - {file = "rpds_py-0.18.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e30ac5e329098903262dc5bdd7e2086e0256aa762cc8b744f9e7bf2a427d3f8"}, - {file = "rpds_py-0.18.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d58ad6317d188c43750cb76e9deacf6051d0f884d87dc6518e0280438648a9ac"}, - {file = "rpds_py-0.18.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:e1735502458621921cee039c47318cb90b51d532c2766593be6207eec53e5c4c"}, - {file = "rpds_py-0.18.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:f5bab211605d91db0e2995a17b5c6ee5edec1270e46223e513eaa20da20076ac"}, - {file = "rpds_py-0.18.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2fc24a329a717f9e2448f8cd1f960f9dac4e45b6224d60734edeb67499bab03a"}, - {file = "rpds_py-0.18.1-cp312-none-win32.whl", hash = "sha256:1805d5901779662d599d0e2e4159d8a82c0b05faa86ef9222bf974572286b2b6"}, - {file = "rpds_py-0.18.1-cp312-none-win_amd64.whl", hash = "sha256:720edcb916df872d80f80a1cc5ea9058300b97721efda8651efcd938a9c70a72"}, - {file = "rpds_py-0.18.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:c827576e2fa017a081346dce87d532a5310241648eb3700af9a571a6e9fc7e74"}, - {file = "rpds_py-0.18.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:aa3679e751408d75a0b4d8d26d6647b6d9326f5e35c00a7ccd82b78ef64f65f8"}, - {file = "rpds_py-0.18.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0abeee75434e2ee2d142d650d1e54ac1f8b01e6e6abdde8ffd6eeac6e9c38e20"}, - {file = "rpds_py-0.18.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed402d6153c5d519a0faf1bb69898e97fb31613b49da27a84a13935ea9164dfc"}, - {file = "rpds_py-0.18.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:338dee44b0cef8b70fd2ef54b4e09bb1b97fc6c3a58fea5db6cc083fd9fc2724"}, - {file = "rpds_py-0.18.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7750569d9526199c5b97e5a9f8d96a13300950d910cf04a861d96f4273d5b104"}, - {file = "rpds_py-0.18.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:607345bd5912aacc0c5a63d45a1f73fef29e697884f7e861094e443187c02be5"}, - {file = "rpds_py-0.18.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:207c82978115baa1fd8d706d720b4a4d2b0913df1c78c85ba73fe6c5804505f0"}, - {file = "rpds_py-0.18.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:6d1e42d2735d437e7e80bab4d78eb2e459af48c0a46e686ea35f690b93db792d"}, - {file = "rpds_py-0.18.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:5463c47c08630007dc0fe99fb480ea4f34a89712410592380425a9b4e1611d8e"}, - {file = "rpds_py-0.18.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:06d218939e1bf2ca50e6b0ec700ffe755e5216a8230ab3e87c059ebb4ea06afc"}, - {file = "rpds_py-0.18.1-cp38-none-win32.whl", hash = "sha256:312fe69b4fe1ffbe76520a7676b1e5ac06ddf7826d764cc10265c3b53f96dbe9"}, - {file = "rpds_py-0.18.1-cp38-none-win_amd64.whl", hash = "sha256:9437ca26784120a279f3137ee080b0e717012c42921eb07861b412340f85bae2"}, - {file = "rpds_py-0.18.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:19e515b78c3fc1039dd7da0a33c28c3154458f947f4dc198d3c72db2b6b5dc93"}, - {file = "rpds_py-0.18.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a7b28c5b066bca9a4eb4e2f2663012debe680f097979d880657f00e1c30875a0"}, - {file = "rpds_py-0.18.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:673fdbbf668dd958eff750e500495ef3f611e2ecc209464f661bc82e9838991e"}, - {file = "rpds_py-0.18.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d960de62227635d2e61068f42a6cb6aae91a7fe00fca0e3aeed17667c8a34611"}, - {file = "rpds_py-0.18.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:352a88dc7892f1da66b6027af06a2e7e5d53fe05924cc2cfc56495b586a10b72"}, - {file = "rpds_py-0.18.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:4e0ee01ad8260184db21468a6e1c37afa0529acc12c3a697ee498d3c2c4dcaf3"}, - {file = "rpds_py-0.18.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4c39ad2f512b4041343ea3c7894339e4ca7839ac38ca83d68a832fc8b3748ab"}, - {file = "rpds_py-0.18.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:aaa71ee43a703c321906813bb252f69524f02aa05bf4eec85f0c41d5d62d0f4c"}, - {file = "rpds_py-0.18.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:6cd8098517c64a85e790657e7b1e509b9fe07487fd358e19431cb120f7d96338"}, - {file = "rpds_py-0.18.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:4adec039b8e2928983f885c53b7cc4cda8965b62b6596501a0308d2703f8af1b"}, - {file = "rpds_py-0.18.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:32b7daaa3e9389db3695964ce8e566e3413b0c43e3394c05e4b243a4cd7bef26"}, - {file = "rpds_py-0.18.1-cp39-none-win32.whl", hash = "sha256:2625f03b105328729f9450c8badda34d5243231eef6535f80064d57035738360"}, - {file = "rpds_py-0.18.1-cp39-none-win_amd64.whl", hash = "sha256:bf18932d0003c8c4d51a39f244231986ab23ee057d235a12b2684ea26a353590"}, - {file = "rpds_py-0.18.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:cbfbea39ba64f5e53ae2915de36f130588bba71245b418060ec3330ebf85678e"}, - {file = "rpds_py-0.18.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:a3d456ff2a6a4d2adcdf3c1c960a36f4fd2fec6e3b4902a42a384d17cf4e7a65"}, - {file = "rpds_py-0.18.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7700936ef9d006b7ef605dc53aa364da2de5a3aa65516a1f3ce73bf82ecfc7ae"}, - {file = "rpds_py-0.18.1-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:51584acc5916212e1bf45edd17f3a6b05fe0cbb40482d25e619f824dccb679de"}, - {file = "rpds_py-0.18.1-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:942695a206a58d2575033ff1e42b12b2aece98d6003c6bc739fbf33d1773b12f"}, - {file = "rpds_py-0.18.1-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b906b5f58892813e5ba5c6056d6a5ad08f358ba49f046d910ad992196ea61397"}, - {file = "rpds_py-0.18.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6f8e3fecca256fefc91bb6765a693d96692459d7d4c644660a9fff32e517843"}, - {file = "rpds_py-0.18.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7732770412bab81c5a9f6d20aeb60ae943a9b36dcd990d876a773526468e7163"}, - {file = "rpds_py-0.18.1-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:bd1105b50ede37461c1d51b9698c4f4be6e13e69a908ab7751e3807985fc0346"}, - {file = "rpds_py-0.18.1-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:618916f5535784960f3ecf8111581f4ad31d347c3de66d02e728de460a46303c"}, - {file = "rpds_py-0.18.1-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:17c6d2155e2423f7e79e3bb18151c686d40db42d8645e7977442170c360194d4"}, - {file = "rpds_py-0.18.1-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:6c4c4c3f878df21faf5fac86eda32671c27889e13570645a9eea0a1abdd50922"}, - {file = "rpds_py-0.18.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:fab6ce90574645a0d6c58890e9bcaac8d94dff54fb51c69e5522a7358b80ab64"}, - {file = "rpds_py-0.18.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:531796fb842b53f2695e94dc338929e9f9dbf473b64710c28af5a160b2a8927d"}, - {file = "rpds_py-0.18.1-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:740884bc62a5e2bbb31e584f5d23b32320fd75d79f916f15a788d527a5e83644"}, - {file = "rpds_py-0.18.1-pp38-pypy38_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:998125738de0158f088aef3cb264a34251908dd2e5d9966774fdab7402edfab7"}, - {file = "rpds_py-0.18.1-pp38-pypy38_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e2be6e9dd4111d5b31ba3b74d17da54a8319d8168890fbaea4b9e5c3de630ae5"}, - {file = "rpds_py-0.18.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d0cee71bc618cd93716f3c1bf56653740d2d13ddbd47673efa8bf41435a60daa"}, - {file = "rpds_py-0.18.1-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2c3caec4ec5cd1d18e5dd6ae5194d24ed12785212a90b37f5f7f06b8bedd7139"}, - {file = "rpds_py-0.18.1-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:27bba383e8c5231cd559affe169ca0b96ec78d39909ffd817f28b166d7ddd4d8"}, - {file = "rpds_py-0.18.1-pp38-pypy38_pp73-musllinux_1_2_i686.whl", hash = "sha256:a888e8bdb45916234b99da2d859566f1e8a1d2275a801bb8e4a9644e3c7e7909"}, - {file = "rpds_py-0.18.1-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:6031b25fb1b06327b43d841f33842b383beba399884f8228a6bb3df3088485ff"}, - {file = "rpds_py-0.18.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:48c2faaa8adfacefcbfdb5f2e2e7bdad081e5ace8d182e5f4ade971f128e6bb3"}, - {file = "rpds_py-0.18.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:d85164315bd68c0806768dc6bb0429c6f95c354f87485ee3593c4f6b14def2bd"}, - {file = "rpds_py-0.18.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6afd80f6c79893cfc0574956f78a0add8c76e3696f2d6a15bca2c66c415cf2d4"}, - {file = "rpds_py-0.18.1-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fa242ac1ff583e4ec7771141606aafc92b361cd90a05c30d93e343a0c2d82a89"}, - {file = "rpds_py-0.18.1-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d21be4770ff4e08698e1e8e0bce06edb6ea0626e7c8f560bc08222880aca6a6f"}, - {file = "rpds_py-0.18.1-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c45a639e93a0c5d4b788b2613bd637468edd62f8f95ebc6fcc303d58ab3f0a8"}, - {file = "rpds_py-0.18.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:910e71711d1055b2768181efa0a17537b2622afeb0424116619817007f8a2b10"}, - {file = "rpds_py-0.18.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b9bb1f182a97880f6078283b3505a707057c42bf55d8fca604f70dedfdc0772a"}, - {file = "rpds_py-0.18.1-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:1d54f74f40b1f7aaa595a02ff42ef38ca654b1469bef7d52867da474243cc633"}, - {file = "rpds_py-0.18.1-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:8d2e182c9ee01135e11e9676e9a62dfad791a7a467738f06726872374a83db49"}, - {file = "rpds_py-0.18.1-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:636a15acc588f70fda1661234761f9ed9ad79ebed3f2125d44be0862708b666e"}, - {file = "rpds_py-0.18.1.tar.gz", hash = "sha256:dc48b479d540770c811fbd1eb9ba2bb66951863e448efec2e2c102625328e92f"}, + {file = "rpds_py-0.19.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:fb37bd599f031f1a6fb9e58ec62864ccf3ad549cf14bac527dbfa97123edcca4"}, + {file = "rpds_py-0.19.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3384d278df99ec2c6acf701d067147320b864ef6727405d6470838476e44d9e8"}, + {file = "rpds_py-0.19.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:e54548e0be3ac117595408fd4ca0ac9278fde89829b0b518be92863b17ff67a2"}, + {file = "rpds_py-0.19.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8eb488ef928cdbc05a27245e52de73c0d7c72a34240ef4d9893fdf65a8c1a955"}, + {file = "rpds_py-0.19.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a5da93debdfe27b2bfc69eefb592e1831d957b9535e0943a0ee8b97996de21b5"}, + {file = "rpds_py-0.19.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:79e205c70afddd41f6ee79a8656aec738492a550247a7af697d5bd1aee14f766"}, + {file = "rpds_py-0.19.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:959179efb3e4a27610e8d54d667c02a9feaa86bbabaf63efa7faa4dfa780d4f1"}, + {file = "rpds_py-0.19.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a6e605bb9edcf010f54f8b6a590dd23a4b40a8cb141255eec2a03db249bc915b"}, + {file = "rpds_py-0.19.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:9133d75dc119a61d1a0ded38fb9ba40a00ef41697cc07adb6ae098c875195a3f"}, + {file = "rpds_py-0.19.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:dd36b712d35e757e28bf2f40a71e8f8a2d43c8b026d881aa0c617b450d6865c9"}, + {file = "rpds_py-0.19.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:354f3a91718489912f2e0fc331c24eaaf6a4565c080e00fbedb6015857c00582"}, + {file = "rpds_py-0.19.0-cp310-none-win32.whl", hash = "sha256:ebcbf356bf5c51afc3290e491d3722b26aaf5b6af3c1c7f6a1b757828a46e336"}, + {file = "rpds_py-0.19.0-cp310-none-win_amd64.whl", hash = "sha256:75a6076289b2df6c8ecb9d13ff79ae0cad1d5fb40af377a5021016d58cd691ec"}, + {file = "rpds_py-0.19.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:6d45080095e585f8c5097897313def60caa2046da202cdb17a01f147fb263b81"}, + {file = "rpds_py-0.19.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c5c9581019c96f865483d031691a5ff1cc455feb4d84fc6920a5ffc48a794d8a"}, + {file = "rpds_py-0.19.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1540d807364c84516417115c38f0119dfec5ea5c0dd9a25332dea60b1d26fc4d"}, + {file = "rpds_py-0.19.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9e65489222b410f79711dc3d2d5003d2757e30874096b2008d50329ea4d0f88c"}, + {file = "rpds_py-0.19.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9da6f400eeb8c36f72ef6646ea530d6d175a4f77ff2ed8dfd6352842274c1d8b"}, + {file = "rpds_py-0.19.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:37f46bb11858717e0efa7893c0f7055c43b44c103e40e69442db5061cb26ed34"}, + {file = "rpds_py-0.19.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:071d4adc734de562bd11d43bd134330fb6249769b2f66b9310dab7460f4bf714"}, + {file = "rpds_py-0.19.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9625367c8955e4319049113ea4f8fee0c6c1145192d57946c6ffcd8fe8bf48dd"}, + {file = "rpds_py-0.19.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:e19509145275d46bc4d1e16af0b57a12d227c8253655a46bbd5ec317e941279d"}, + {file = "rpds_py-0.19.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4d438e4c020d8c39961deaf58f6913b1bf8832d9b6f62ec35bd93e97807e9cbc"}, + {file = "rpds_py-0.19.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:90bf55d9d139e5d127193170f38c584ed3c79e16638890d2e36f23aa1630b952"}, + {file = "rpds_py-0.19.0-cp311-none-win32.whl", hash = "sha256:8d6ad132b1bc13d05ffe5b85e7a01a3998bf3a6302ba594b28d61b8c2cf13aaf"}, + {file = 
"rpds_py-0.19.0-cp311-none-win_amd64.whl", hash = "sha256:7ec72df7354e6b7f6eb2a17fa6901350018c3a9ad78e48d7b2b54d0412539a67"}, + {file = "rpds_py-0.19.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:5095a7c838a8647c32aa37c3a460d2c48debff7fc26e1136aee60100a8cd8f68"}, + {file = "rpds_py-0.19.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6f2f78ef14077e08856e788fa482107aa602636c16c25bdf59c22ea525a785e9"}, + {file = "rpds_py-0.19.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b7cc6cb44f8636fbf4a934ca72f3e786ba3c9f9ba4f4d74611e7da80684e48d2"}, + {file = "rpds_py-0.19.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:cf902878b4af334a09de7a45badbff0389e7cf8dc2e4dcf5f07125d0b7c2656d"}, + {file = "rpds_py-0.19.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:688aa6b8aa724db1596514751ffb767766e02e5c4a87486ab36b8e1ebc1aedac"}, + {file = "rpds_py-0.19.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:57dbc9167d48e355e2569346b5aa4077f29bf86389c924df25c0a8b9124461fb"}, + {file = "rpds_py-0.19.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b4cf5a9497874822341c2ebe0d5850fed392034caadc0bad134ab6822c0925b"}, + {file = "rpds_py-0.19.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8a790d235b9d39c70a466200d506bb33a98e2ee374a9b4eec7a8ac64c2c261fa"}, + {file = "rpds_py-0.19.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1d16089dfa58719c98a1c06f2daceba6d8e3fb9b5d7931af4a990a3c486241cb"}, + {file = "rpds_py-0.19.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:bc9128e74fe94650367fe23f37074f121b9f796cabbd2f928f13e9661837296d"}, + {file = "rpds_py-0.19.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c8f77e661ffd96ff104bebf7d0f3255b02aa5d5b28326f5408d6284c4a8b3248"}, + {file = "rpds_py-0.19.0-cp312-none-win32.whl", hash = "sha256:5f83689a38e76969327e9b682be5521d87a0c9e5a2e187d2bc6be4765f0d4600"}, + {file = "rpds_py-0.19.0-cp312-none-win_amd64.whl", hash = "sha256:06925c50f86da0596b9c3c64c3837b2481337b83ef3519e5db2701df695453a4"}, + {file = "rpds_py-0.19.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:52e466bea6f8f3a44b1234570244b1cff45150f59a4acae3fcc5fd700c2993ca"}, + {file = "rpds_py-0.19.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e21cc693045fda7f745c790cb687958161ce172ffe3c5719ca1764e752237d16"}, + {file = "rpds_py-0.19.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b31f059878eb1f5da8b2fd82480cc18bed8dcd7fb8fe68370e2e6285fa86da6"}, + {file = "rpds_py-0.19.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1dd46f309e953927dd018567d6a9e2fb84783963650171f6c5fe7e5c41fd5666"}, + {file = "rpds_py-0.19.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:34a01a4490e170376cd79258b7f755fa13b1a6c3667e872c8e35051ae857a92b"}, + {file = "rpds_py-0.19.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bcf426a8c38eb57f7bf28932e68425ba86def6e756a5b8cb4731d8e62e4e0223"}, + {file = "rpds_py-0.19.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f68eea5df6347d3f1378ce992d86b2af16ad7ff4dcb4a19ccdc23dea901b87fb"}, + {file = "rpds_py-0.19.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:dab8d921b55a28287733263c0e4c7db11b3ee22aee158a4de09f13c93283c62d"}, + {file = "rpds_py-0.19.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = 
"sha256:6fe87efd7f47266dfc42fe76dae89060038f1d9cb911f89ae7e5084148d1cc08"}, + {file = "rpds_py-0.19.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:535d4b52524a961d220875688159277f0e9eeeda0ac45e766092bfb54437543f"}, + {file = "rpds_py-0.19.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:8b1a94b8afc154fbe36978a511a1f155f9bd97664e4f1f7a374d72e180ceb0ae"}, + {file = "rpds_py-0.19.0-cp38-none-win32.whl", hash = "sha256:7c98298a15d6b90c8f6e3caa6457f4f022423caa5fa1a1ca7a5e9e512bdb77a4"}, + {file = "rpds_py-0.19.0-cp38-none-win_amd64.whl", hash = "sha256:b0da31853ab6e58a11db3205729133ce0df26e6804e93079dee095be3d681dc1"}, + {file = "rpds_py-0.19.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:5039e3cef7b3e7a060de468a4a60a60a1f31786da94c6cb054e7a3c75906111c"}, + {file = "rpds_py-0.19.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ab1932ca6cb8c7499a4d87cb21ccc0d3326f172cfb6a64021a889b591bb3045c"}, + {file = "rpds_py-0.19.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f2afd2164a1e85226fcb6a1da77a5c8896c18bfe08e82e8ceced5181c42d2179"}, + {file = "rpds_py-0.19.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b1c30841f5040de47a0046c243fc1b44ddc87d1b12435a43b8edff7e7cb1e0d0"}, + {file = "rpds_py-0.19.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f757f359f30ec7dcebca662a6bd46d1098f8b9fb1fcd661a9e13f2e8ce343ba1"}, + {file = "rpds_py-0.19.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15e65395a59d2e0e96caf8ee5389ffb4604e980479c32742936ddd7ade914b22"}, + {file = "rpds_py-0.19.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb0f6eb3a320f24b94d177e62f4074ff438f2ad9d27e75a46221904ef21a7b05"}, + {file = "rpds_py-0.19.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b228e693a2559888790936e20f5f88b6e9f8162c681830eda303bad7517b4d5a"}, + {file = "rpds_py-0.19.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2575efaa5d949c9f4e2cdbe7d805d02122c16065bfb8d95c129372d65a291a0b"}, + {file = "rpds_py-0.19.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:5c872814b77a4e84afa293a1bee08c14daed1068b2bb1cc312edbf020bbbca2b"}, + {file = "rpds_py-0.19.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:850720e1b383df199b8433a20e02b25b72f0fded28bc03c5bd79e2ce7ef050be"}, + {file = "rpds_py-0.19.0-cp39-none-win32.whl", hash = "sha256:ce84a7efa5af9f54c0aa7692c45861c1667080814286cacb9958c07fc50294fb"}, + {file = "rpds_py-0.19.0-cp39-none-win_amd64.whl", hash = "sha256:1c26da90b8d06227d7769f34915913911222d24ce08c0ab2d60b354e2d9c7aff"}, + {file = "rpds_py-0.19.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:75969cf900d7be665ccb1622a9aba225cf386bbc9c3bcfeeab9f62b5048f4a07"}, + {file = "rpds_py-0.19.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:8445f23f13339da640d1be8e44e5baf4af97e396882ebbf1692aecd67f67c479"}, + {file = "rpds_py-0.19.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5a7c1062ef8aea3eda149f08120f10795835fc1c8bc6ad948fb9652a113ca55"}, + {file = "rpds_py-0.19.0-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:462b0c18fbb48fdbf980914a02ee38c423a25fcc4cf40f66bacc95a2d2d73bc8"}, + {file = "rpds_py-0.19.0-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3208f9aea18991ac7f2b39721e947bbd752a1abbe79ad90d9b6a84a74d44409b"}, + {file = 
"rpds_py-0.19.0-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c3444fe52b82f122d8a99bf66777aed6b858d392b12f4c317da19f8234db4533"}, + {file = "rpds_py-0.19.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88cb4bac7185a9f0168d38c01d7a00addece9822a52870eee26b8d5b61409213"}, + {file = "rpds_py-0.19.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6b130bd4163c93798a6b9bb96be64a7c43e1cec81126ffa7ffaa106e1fc5cef5"}, + {file = "rpds_py-0.19.0-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:a707b158b4410aefb6b054715545bbb21aaa5d5d0080217290131c49c2124a6e"}, + {file = "rpds_py-0.19.0-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:dc9ac4659456bde7c567107556ab065801622396b435a3ff213daef27b495388"}, + {file = "rpds_py-0.19.0-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:81ea573aa46d3b6b3d890cd3c0ad82105985e6058a4baed03cf92518081eec8c"}, + {file = "rpds_py-0.19.0-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3f148c3f47f7f29a79c38cc5d020edcb5ca780020fab94dbc21f9af95c463581"}, + {file = "rpds_py-0.19.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:b0906357f90784a66e89ae3eadc2654f36c580a7d65cf63e6a616e4aec3a81be"}, + {file = "rpds_py-0.19.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f629ecc2db6a4736b5ba95a8347b0089240d69ad14ac364f557d52ad68cf94b0"}, + {file = "rpds_py-0.19.0-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c6feacd1d178c30e5bc37184526e56740342fd2aa6371a28367bad7908d454fc"}, + {file = "rpds_py-0.19.0-pp38-pypy38_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae8b6068ee374fdfab63689be0963333aa83b0815ead5d8648389a8ded593378"}, + {file = "rpds_py-0.19.0-pp38-pypy38_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:78d57546bad81e0da13263e4c9ce30e96dcbe720dbff5ada08d2600a3502e526"}, + {file = "rpds_py-0.19.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8b6683a37338818646af718c9ca2a07f89787551057fae57c4ec0446dc6224b"}, + {file = "rpds_py-0.19.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e8481b946792415adc07410420d6fc65a352b45d347b78fec45d8f8f0d7496f0"}, + {file = "rpds_py-0.19.0-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:bec35eb20792ea64c3c57891bc3ca0bedb2884fbac2c8249d9b731447ecde4fa"}, + {file = "rpds_py-0.19.0-pp38-pypy38_pp73-musllinux_1_2_i686.whl", hash = "sha256:aa5476c3e3a402c37779e95f7b4048db2cb5b0ed0b9d006983965e93f40fe05a"}, + {file = "rpds_py-0.19.0-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:19d02c45f2507b489fd4df7b827940f1420480b3e2e471e952af4d44a1ea8e34"}, + {file = "rpds_py-0.19.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a3e2fd14c5d49ee1da322672375963f19f32b3d5953f0615b175ff7b9d38daed"}, + {file = "rpds_py-0.19.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:93a91c2640645303e874eada51f4f33351b84b351a689d470f8108d0e0694210"}, + {file = "rpds_py-0.19.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e5b9fc03bf76a94065299d4a2ecd8dfbae4ae8e2e8098bbfa6ab6413ca267709"}, ] [[package]] @@ -2234,19 +2161,18 @@ files = [ [[package]] name = "setuptools" -version = "69.5.1" +version = "70.3.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = 
"setuptools-69.5.1-py3-none-any.whl", hash = "sha256:c636ac361bc47580504644275c9ad802c50415c7522212252c033bd15f301f32"}, - {file = "setuptools-69.5.1.tar.gz", hash = "sha256:6c1fccdac05a97e598fb0ae3bbed5904ccb317337a51139dcd51453611bbb987"}, + {file = "setuptools-70.3.0-py3-none-any.whl", hash = "sha256:fe384da74336c398e0d956d1cae0669bc02eed936cdb1d49b57de1990dc11ffc"}, + {file = "setuptools-70.3.0.tar.gz", hash = "sha256:f171bab1dfbc86b132997f26a119f6056a57950d058587841a0082e8830f9dc5"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6,!=8.1.1)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "mypy (==1.10.0)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (>=0.3.2)", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "six" @@ -2283,27 +2209,27 @@ files = [ [[package]] name = "sphinx" -version = "4.5.0" +version = "7.1.2" description = "Python documentation generator" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "Sphinx-4.5.0-py3-none-any.whl", hash = "sha256:ebf612653238bcc8f4359627a9b7ce44ede6fdd75d9d30f68255c7383d3a6226"}, - {file = "Sphinx-4.5.0.tar.gz", hash = "sha256:7bf8ca9637a4ee15af412d1a1d9689fec70523a68ca9bb9127c2f3eeb344e2e6"}, + {file = "sphinx-7.1.2-py3-none-any.whl", hash = "sha256:d170a81825b2fcacb6dfd5a0d7f578a053e45d3f2b153fecc948c37344eb4cbe"}, + {file = "sphinx-7.1.2.tar.gz", hash = "sha256:780f4d32f1d7d1126576e0e5ecc19dc32ab76cd24e950228dcf7b1f6d3d9e22f"}, ] [package.dependencies] alabaster = ">=0.7,<0.8" -babel = ">=1.3" -colorama = {version = ">=0.3.5", markers = "sys_platform == \"win32\""} -docutils = ">=0.14,<0.18" -imagesize = "*" -importlib-metadata = {version = ">=4.4", markers = "python_version < \"3.10\""} -Jinja2 = ">=2.3" -packaging = "*" -Pygments = ">=2.0" -requests = ">=2.5.0" -snowballstemmer = ">=1.1" +babel = ">=2.9" 
+colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} +docutils = ">=0.18.1,<0.21" +imagesize = ">=1.3" +importlib-metadata = {version = ">=4.8", markers = "python_version < \"3.10\""} +Jinja2 = ">=3.0" +packaging = ">=21.0" +Pygments = ">=2.13" +requests = ">=2.25.0" +snowballstemmer = ">=2.0" sphinxcontrib-applehelp = "*" sphinxcontrib-devhelp = "*" sphinxcontrib-htmlhelp = ">=2.0.0" @@ -2313,41 +2239,43 @@ sphinxcontrib-serializinghtml = ">=1.1.5" [package.extras] docs = ["sphinxcontrib-websupport"] -lint = ["docutils-stubs", "flake8 (>=3.5.0)", "isort", "mypy (>=0.931)", "types-requests", "types-typed-ast"] -test = ["cython", "html5lib", "pytest", "pytest-cov", "typed-ast"] +lint = ["docutils-stubs", "flake8 (>=3.5.0)", "flake8-simplify", "isort", "mypy (>=0.990)", "ruff", "sphinx-lint", "types-requests"] +test = ["cython", "filelock", "html5lib", "pytest (>=4.6)"] [[package]] name = "sphinx-autodoc-typehints" -version = "1.14.1" +version = "2.0.1" description = "Type hints (PEP 484) support for the Sphinx autodoc extension" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "sphinx_autodoc_typehints-1.14.1-py3-none-any.whl", hash = "sha256:8b3b7da797fa007f7f39c518879a1bdae3a7dab96e170f4cb5a4b96390238369"}, - {file = "sphinx_autodoc_typehints-1.14.1.tar.gz", hash = "sha256:875de815a1ba609a4c0ebc620faecd8eb57183ba1f4cc6f8abba1790c140e960"}, + {file = "sphinx_autodoc_typehints-2.0.1-py3-none-any.whl", hash = "sha256:f73ae89b43a799e587e39266672c1075b2ef783aeb382d3ebed77c38a3fc0149"}, + {file = "sphinx_autodoc_typehints-2.0.1.tar.gz", hash = "sha256:60ed1e3b2c970acc0aa6e877be42d48029a9faec7378a17838716cacd8c10b12"}, ] [package.dependencies] -Sphinx = ">=4" +sphinx = ">=7.1.2" [package.extras] -testing = ["covdefaults (>=2)", "coverage (>=6)", "diff-cover (>=6.4)", "pytest (>=6)", "pytest-cov (>=3)", "sphobjinv (>=2)", "typing-extensions (>=3.5)"] -type-comments = ["typed-ast (>=1.4.0)"] +docs = ["furo (>=2024.1.29)"] +numpy = ["nptyping (>=2.5)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.4.2)", "diff-cover (>=8.0.3)", "pytest (>=8.0.1)", "pytest-cov (>=4.1)", "sphobjinv (>=2.3.1)", "typing-extensions (>=4.9)"] [[package]] name = "sphinx-jinja2-compat" -version = "0.2.0.post1" +version = "0.3.0" description = "Patches Jinja2 v3 to restore compatibility with earlier Sphinx versions." 
optional = false python-versions = ">=3.6" files = [ - {file = "sphinx_jinja2_compat-0.2.0.post1-py3-none-any.whl", hash = "sha256:f9d329174bdde8db19dc12c62528367196eb2f6b46c91754eca604acd0c0f6ad"}, - {file = "sphinx_jinja2_compat-0.2.0.post1.tar.gz", hash = "sha256:974289a12a9f402108dead621e9c15f7004e945d5cfcaea8d6419e94d3fa95a3"}, + {file = "sphinx_jinja2_compat-0.3.0-py3-none-any.whl", hash = "sha256:b1e4006d8e1ea31013fa9946d1b075b0c8d2a42c6e3425e63542c1e9f8be9084"}, + {file = "sphinx_jinja2_compat-0.3.0.tar.gz", hash = "sha256:f3c1590b275f42e7a654e081db5e3e5fb97f515608422bde94015ddf795dfe7c"}, ] [package.dependencies] jinja2 = ">=2.10" markupsafe = ">=1" +standard-imghdr = {version = "3.10.14", markers = "python_version >= \"3.13\""} [[package]] name = "sphinx-prompt" @@ -2384,57 +2312,57 @@ dev = ["bump2version", "sphinxcontrib-httpdomain", "transifex-client", "wheel"] [[package]] name = "sphinx-tabs" -version = "3.3.1" +version = "3.4.5" description = "Tabbed views for Sphinx" optional = false -python-versions = "~=3.6" +python-versions = "~=3.7" files = [ - {file = "sphinx-tabs-3.3.1.tar.gz", hash = "sha256:d10dd7fb2700329b8e5948ab9f8e3ef54fff30f79d2e42cfd1b0089ae26e8c5e"}, - {file = "sphinx_tabs-3.3.1-py3-none-any.whl", hash = "sha256:73209aa769246501f6de9e33051cfd2d54f5900e0cc28a63367d8e4af4c0db5d"}, + {file = "sphinx-tabs-3.4.5.tar.gz", hash = "sha256:ba9d0c1e3e37aaadd4b5678449eb08176770e0fc227e769b6ce747df3ceea531"}, + {file = "sphinx_tabs-3.4.5-py3-none-any.whl", hash = "sha256:92cc9473e2ecf1828ca3f6617d0efc0aa8acb06b08c56ba29d1413f2f0f6cf09"}, ] [package.dependencies] -docutils = ">=0.17.0,<0.18.0" +docutils = "*" pygments = "*" -sphinx = ">=2,<5" +sphinx = "*" [package.extras] code-style = ["pre-commit (==2.13.0)"] -testing = ["bs4", "coverage", "pygments", "pytest (>=3.6,<4)", "pytest-cov", "pytest-regressions", "rinohtype", "sphinx-testing"] +testing = ["bs4", "coverage", "pygments", "pytest (>=7.1,<8)", "pytest-cov", "pytest-regressions", "rinohtype"] [[package]] name = "sphinx-toolbox" -version = "3.0.0" +version = "3.7.0" description = "Box of handy tools for Sphinx 🧰 📔" optional = false python-versions = ">=3.7" files = [ - {file = "sphinx_toolbox-3.0.0-py3-none-any.whl", hash = "sha256:5474998039c28a59057ce8f36d658eeaae0cd8d793bbfde408dcfa8dbd00e5dc"}, - {file = "sphinx_toolbox-3.0.0.tar.gz", hash = "sha256:f5dff79b8644f1e010380802470862e397dc12cbcb737f3d42108ab37d9540da"}, + {file = "sphinx_toolbox-3.7.0-py3-none-any.whl", hash = "sha256:9ea800fb6b2ecc5f382ab3547b415029fe9603b0b7eeb4c15c5da77c1eb5f21a"}, + {file = "sphinx_toolbox-3.7.0.tar.gz", hash = "sha256:045db70e6d73241ff0b922f2e76597e2cc4904dbabef9fc6f3a46718b0e48d51"}, ] [package.dependencies] apeye = ">=0.4.0" autodocsumm = ">=0.2.0" beautifulsoup4 = ">=4.9.1" -cachecontrol = {version = ">=0.12.6", extras = ["filecache"]} +cachecontrol = {version = ">=0.13.0", extras = ["filecache"]} dict2css = ">=0.2.3" -docutils = ">=0.16,<0.18" +docutils = ">=0.16" domdf-python-tools = ">=2.9.0" +filelock = ">=3.8.0" html5lib = ">=1.1" -lockfile = ">=0.12.2" "ruamel.yaml" = ">=0.16.12" sphinx = ">=3.2.0" -sphinx-autodoc-typehints = ">=1.11.1,<=1.14.1" +sphinx-autodoc-typehints = ">=1.11.1" sphinx-jinja2-compat = ">=0.1.0" sphinx-prompt = ">=1.1.0" -sphinx-tabs = ">=1.2.1,<3.4.0" +sphinx-tabs = ">=1.2.1,<3.5.0" tabulate = ">=0.8.7" typing-extensions = ">=3.7.4.3,<3.10.0.1 || >3.10.0.1" [package.extras] -all = ["coincidence (>=0.4.3)", "pygments (>=2.7.4)"] -testing = ["coincidence (>=0.4.3)", "pygments (>=2.7.4)"] +all = 
["coincidence (>=0.4.3)", "pygments (>=2.7.4,<=2.13.0)"] +testing = ["coincidence (>=0.4.3)", "pygments (>=2.7.4,<=2.13.0)"] [[package]] name = "sphinxcontrib-applehelp" @@ -2539,6 +2467,17 @@ files = [ lint = ["docutils-stubs", "flake8", "mypy"] test = ["pytest"] +[[package]] +name = "standard-imghdr" +version = "3.10.14" +description = "Standard library imghdr redistribution. \"dead battery\"." +optional = false +python-versions = "*" +files = [ + {file = "standard_imghdr-3.10.14-py3-none-any.whl", hash = "sha256:cdf6883163349624dee9a81d2853a20260337c4cd41c04e99c082e01833a08e2"}, + {file = "standard_imghdr-3.10.14.tar.gz", hash = "sha256:2598fe2e7c540dbda34b233295e10957ab8dc8ac6f3bd9eaa8d38be167232e52"}, +] + [[package]] name = "stringcase" version = "1.2.0" @@ -2616,13 +2555,13 @@ python-dotenv = "*" [[package]] name = "typing-extensions" -version = "4.11.0" +version = "4.12.2" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.11.0-py3-none-any.whl", hash = "sha256:c1f94d72897edaf4ce775bb7558d5b79d8126906a14ea5ed1635921406c0387a"}, - {file = "typing_extensions-4.11.0.tar.gz", hash = "sha256:83f085bd5ca59c80295fc2a82ab5dac679cbe02b9f33f7d83af68e241bea51b0"}, + {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, + {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, ] [[package]] @@ -2656,13 +2595,13 @@ devenv = ["check-manifest", "pytest (>=4.3)", "pytest-cov", "pytest-mock (>=3.3) [[package]] name = "urllib3" -version = "2.2.1" +version = "2.2.2" description = "HTTP library with thread-safe connection pooling, file post, and more." 
optional = false python-versions = ">=3.8" files = [ - {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, - {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, + {file = "urllib3-2.2.2-py3-none-any.whl", hash = "sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472"}, + {file = "urllib3-2.2.2.tar.gz", hash = "sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168"}, ] [package.extras] @@ -2673,13 +2612,13 @@ zstd = ["zstandard (>=0.18.0)"] [[package]] name = "virtualenv" -version = "20.26.1" +version = "20.26.3" description = "Virtual Python Environment builder" optional = false python-versions = ">=3.7" files = [ - {file = "virtualenv-20.26.1-py3-none-any.whl", hash = "sha256:7aa9982a728ae5892558bff6a2839c00b9ed145523ece2274fad6f414690ae75"}, - {file = "virtualenv-20.26.1.tar.gz", hash = "sha256:604bfdceaeece392802e6ae48e69cec49168b9c5f4a44e483963f9242eb0e78b"}, + {file = "virtualenv-20.26.3-py3-none-any.whl", hash = "sha256:8cc4a31139e796e9a7de2cd5cf2489de1217193116a8fd42328f1bd65f434589"}, + {file = "virtualenv-20.26.3.tar.gz", hash = "sha256:4c43a2a236279d9ea36a0d76f98d84bd6ca94ac4e0f4a3b9d46d05e10fea542a"}, ] [package.dependencies] @@ -2721,24 +2660,23 @@ watchdog = ["watchdog (>=2.3)"] [[package]] name = "zipp" -version = "3.18.1" +version = "3.19.2" description = "Backport of pathlib-compatible object wrapper for zip files" optional = false python-versions = ">=3.8" files = [ - {file = "zipp-3.18.1-py3-none-any.whl", hash = "sha256:206f5a15f2af3dbaee80769fb7dc6f249695e940acca08dfb2a4769fe61e538b"}, - {file = "zipp-3.18.1.tar.gz", hash = "sha256:2884ed22e7d8961de1c9a05142eb69a247f120291bc0206a00a7642f09b5b715"}, + {file = "zipp-3.19.2-py3-none-any.whl", hash = "sha256:f091755f667055f2d02b32c53771a7a6c8b47e1fdbc4b72a8b9072b3eef8015c"}, + {file = "zipp-3.19.2.tar.gz", hash = "sha256:bf1dcf6450f873a13e952a29504887c89e6de7506209e5b1bcc3460135d4de19"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] [extras] -bigquery = ["db-dtypes", "google-cloud-bigquery"] hdf5 = ["h5py"] [metadata] lock-version = "2.0" python-versions = "^3.8" -content-hash = "b9029d4c60cbf5037f3956043d0a616754a16a5df2edc2e7d76f664d58428d68" +content-hash = "ba321fc60fa5f607da93d349ad9579fd02f0f17bd2c4ee43eb0b4d53ea705ef7" diff --git a/pyproject.toml b/pyproject.toml index a830fa7e2..cdcf3ce85 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "octue" -version = "0.55.0" +version = "0.56.0" description = "A package providing template applications for data services, and a python SDK to the Octue API." 
readme = "README.md" authors = ["Marcus Lugg ", "Thomas Clark "] @@ -27,7 +27,7 @@ coolname = "^2" Flask = "^2" google-auth = ">=1.27.0,<3" google-cloud-pubsub = "^2.5" -google-cloud-secret-manager = "^2.3" +google-cloud-secret-manager = "^2.20" google-cloud-storage = ">=1.35.1, <3" google-crc32c = "^1.1" gunicorn = "^22" @@ -36,12 +36,10 @@ pyyaml = "^6" h5py = { version = "^3.6", optional = true } twined = "0.5.5" packaging = ">=20.4" -google-cloud-bigquery = { version = "^3.18.0", optional = true } -db-dtypes = { version = "^1.2.0", optional = true } +google-cloud-bigquery = "^3.18.0" [tool.poetry.extras] hdf5 = ["h5py"] -bigquery = ["google-cloud-bigquery", "db-dtypes"] [tool.poetry.scripts] octue = "octue.cli:octue_cli" @@ -62,10 +60,10 @@ dateparser = "1.1.1" stringcase = "1.2.0" pandas = "^1.3" # Documentation -Sphinx = ">=4,<5" +Sphinx = ">=5,<8" sphinx-rtd-theme = ">=1,<2" -sphinx-tabs = ">=3,<4" -sphinx-toolbox = "3.0.0" +sphinx-tabs = ">=3.4.0,<4" +sphinx-toolbox = "^3" isort = "5.10.1" [build-system] diff --git a/terraform/bigquery.tf b/terraform/bigquery.tf index 4c0034b89..bd8bfa630 100644 --- a/terraform/bigquery.tf +++ b/terraform/bigquery.tf @@ -45,6 +45,11 @@ resource "google_bigquery_table" "test_table" { "type": "STRING", "mode": "REQUIRED" }, + { + "name": "parent", + "type": "STRING", + "mode": "REQUIRED" + }, { "name": "sender", "type": "STRING", @@ -66,12 +71,17 @@ resource "google_bigquery_table" "test_table" { "mode": "REQUIRED" }, { - "name": "question_uuid", + "name": "originator_question_uuid", "type": "STRING", "mode": "REQUIRED" }, { - "name": "order", + "name": "parent_question_uuid", + "type": "STRING", + "mode": "NULLABLE" + }, + { + "name": "question_uuid", "type": "STRING", "mode": "REQUIRED" }, diff --git a/terraform/functions.tf b/terraform/functions.tf index 250ca2882..7fca55d58 100644 --- a/terraform/functions.tf +++ b/terraform/functions.tf @@ -9,7 +9,7 @@ resource "google_cloudfunctions2_function" "event_handler" { source { storage_source { bucket = "twined-gcp" - object = "event_handler/0.5.0.zip" + object = "event_handler/0.6.1.zip" } } } diff --git a/terraform/iam.tf b/terraform/iam.tf index 4ae3efa0a..1887878b5 100644 --- a/terraform/iam.tf +++ b/terraform/iam.tf @@ -1,27 +1,3 @@ -# You need to start with a service account called "terraform" which has both the 'editor' and 'owner' basic permissions. -# This allows it to assign permissions to resources per https://cloud.google.com/iam/docs/understanding-roles -# -# To create domain-named storage buckets using terraform, you first have to verify ownership of the root domain, or -# "property", (eg octue.com) using the google search console. Once verified, you need to add the service account with -# which terraform acts ( eg terraform@octue-sdk-python.iam.gserviceaccount.com ) to Google Search Console > Settings > Users -# and Permissions, with "Owner" level permission. - -resource "google_service_account" "dev_cortadocodes_service_account" { - account_id = "dev-cortadocodes" - description = "Allow cortadocodes to access developer-specific resources" - display_name = "dev-cortadocodes" - project = var.project -} - - -resource "google_service_account" "github_actions_service_account" { - account_id = "github-actions" - description = "Allow GitHub Actions to test the SDK." 
- display_name = "github-actions" - project = var.project -} - - resource "google_project_iam_binding" "iam_serviceaccountuser" { project = var.project role = "roles/iam.serviceAccountUser" diff --git a/terraform/iam_service_accounts.tf b/terraform/iam_service_accounts.tf new file mode 100644 index 000000000..fa611b09c --- /dev/null +++ b/terraform/iam_service_accounts.tf @@ -0,0 +1,22 @@ +# You need to start with a service account called "terraform" which has both the 'editor' and 'owner' basic permissions. +# This allows it to assign permissions to resources per https://cloud.google.com/iam/docs/understanding-roles +# +# To create domain-named storage buckets using terraform, you first have to verify ownership of the root domain, or +# "property", (eg octue.com) using the google search console. Once verified, you need to add the service account with +# which terraform acts ( eg terraform@octue-sdk-python.iam.gserviceaccount.com ) to Google Search Console > Settings > Users +# and Permissions, with "Owner" level permission. + +resource "google_service_account" "dev_cortadocodes_service_account" { + account_id = "dev-cortadocodes" + description = "Allow cortadocodes to access developer-specific resources" + display_name = "dev-cortadocodes" + project = var.project +} + + +resource "google_service_account" "github_actions_service_account" { + account_id = "github-actions" + description = "Allow GitHub Actions to test the SDK." + display_name = "github-actions" + project = var.project +} diff --git a/tests/cloud/deployment/google/cloud_run/test_cloud_run_deployment.py b/tests/cloud/deployment/google/cloud_run/test_cloud_run_deployment.py index f4fde8375..056d67a91 100644 --- a/tests/cloud/deployment/google/cloud_run/test_cloud_run_deployment.py +++ b/tests/cloud/deployment/google/cloud_run/test_cloud_run_deployment.py @@ -4,11 +4,13 @@ from unittest import TestCase import twined.exceptions +from octue.cloud.events.replayer import EventReplayer +from octue.cloud.events.validation import is_event_valid from octue.cloud.pub_sub.bigquery import get_events from octue.resources import Child -EXAMPLE_SERVICE_SRUID = "octue/example-service-cloud-run:0.4.2" +EXAMPLE_SERVICE_SRUID = "octue/example-service:0.5.0" @unittest.skipUnless( @@ -46,18 +48,25 @@ def test_asynchronous_question(self): self.assertIsNone(answer) # Wait for question to complete. - time.sleep(10) + time.sleep(15) - events = get_events( - table_id="octue_sdk_python_test_dataset.service-events", - sender=EXAMPLE_SERVICE_SRUID, - question_uuid=question_uuid, - kind="result", + events = get_events(table_id="octue_sdk_python_test_dataset.service-events", question_uuid=question_uuid) + + self.assertTrue( + is_event_valid( + event=events[0]["event"], + attributes=events[0]["attributes"], + recipient=None, + parent_sdk_version=None, + child_sdk_version=None, + ) ) + replayer = EventReplayer() + answer = replayer.handle_events(events) + # Check the output values. - self.assertEqual(list(events[0]["event"]["output_values"]), [1, 2, 3, 4, 5]) + self.assertEqual(list(answer["output_values"]), [1, 2, 3, 4, 5]) - # Check that the output dataset and its files can be accessed. 
- with events[0]["event"]["output_manifest"].datasets["example_dataset"].files.one() as (datafile, f): + with answer["output_manifest"].datasets["example_dataset"].files.one() as (datafile, f): self.assertEqual(f.read(), "This is some example service output.") diff --git a/tests/cloud/deployment/google/cloud_run/test_flask_app.py b/tests/cloud/deployment/google/cloud_run/test_flask_app.py index 2bc461f04..bef35f05a 100644 --- a/tests/cloud/deployment/google/cloud_run/test_flask_app.py +++ b/tests/cloud/deployment/google/cloud_run/test_flask_app.py @@ -1,15 +1,34 @@ -import base64 -import json +import copy +import logging +import os import uuid -from unittest import TestCase, mock +from unittest import TestCase +from unittest.mock import patch + +from google.api_core.exceptions import NotFound from octue.cloud.deployment.google.cloud_run import flask_app +from octue.configuration import ServiceConfiguration +from octue.utils.patches import MultiPatcher +from tests import TESTS_DIR flask_app.app.testing = True -class TestFlaskApp(TestCase): +TWINE_FILE_PATH = os.path.join(TESTS_DIR, "data", "twines", "valid_schema_twine.json") + +MOCK_CONFIGURATION = ServiceConfiguration( + namespace="testing", + name="test-app", + app_source_path=os.path.join(TESTS_DIR, "test_app_modules", "app_module"), + twine_path=TWINE_FILE_PATH, + app_configuration_path="blah.json", + event_store_table_id="mock-event-store-table-id", +) + + +class TestInvalidPayloads(TestCase): def test_post_to_index_with_no_payload_results_in_400_error(self): """Test that a 400 (bad request) error code is returned if no payload is sent to the Flask endpoint.""" with flask_app.app.test_client() as client: @@ -25,80 +44,196 @@ def test_post_to_index_with_invalid_payload_results_in_400_error(self): response = client.post("/", json={"message": "data", "deliveryAttempt": 1}) self.assertEqual(response.status_code, 400) - def test_post_to_index_with_valid_payload(self): - """Test that the Flask endpoint returns a 204 (ok, no content) response to a valid payload.""" + +class TestQuestionRedelivery(TestCase): + def test_warning_logged_if_no_event_store_provided(self): + """Test that the question is allowed to proceed to analysis and a warning is logged if the event store cannot be + checked because one hasn't been specified in the service configuration. 
+ """ + mock_configuration = copy.deepcopy(MOCK_CONFIGURATION) + mock_configuration.event_store_table_id = None + with flask_app.app.test_client() as client: - with mock.patch("octue.cloud.deployment.google.cloud_run.flask_app.answer_question"): - - response = client.post( - "/", - json={ - "deliveryAttempt": 1, - "subscription": "projects/my-project/subscriptions/my-subscription", - "message": { - "data": base64.b64encode( - json.dumps({"input_values": [1, 2, 3], "input_manifest": None}).encode() - ).decode(), - "attributes": {"question_uuid": str(uuid.uuid4()), "forward_logs": "1"}, - }, - }, - ) + with patch("octue.cloud.deployment.google.cloud_run.flask_app.answer_question") as mock_answer_question: + with patch("octue.configuration.ServiceConfiguration.from_file", return_value=mock_configuration): + with self.assertLogs(level=logging.WARNING) as logging_context: + response = client.post( + "/", + json={ + "deliveryAttempt": 1, + "subscription": "projects/my-project/subscriptions/my-subscription", + "message": { + "data": {}, + "attributes": { + "question_uuid": str(uuid.uuid4()), + "forward_logs": "1", + "retry_count": "0", + }, + }, + }, + ) - self.assertEqual(response.status_code, 204) + self.assertTrue( + logging_context.output[0].endswith( + "Cannot check if question has been redelivered as the 'event_store_table_id' key hasn't been set in " + "the service configuration (`octue.yaml` file)." + ) + ) - def test_redelivered_questions_are_acknowledged_and_ignored(self): - """Test that redelivered questions are acknowledged and then ignored.""" - question_uuid = "fcd7aad7-dbf0-47d2-8984-220d493df2c1" + self.assertEqual(response.status_code, 204) + mock_answer_question.assert_called_once() + + def test_warning_logged_if_event_store_not_found(self): + """Test that the question is allowed to proceed to analysis and a warning is logged if the event store cannot be + found. + """ + mock_configuration = copy.deepcopy(MOCK_CONFIGURATION) + mock_configuration.event_store_table_id = "nonexistent.table" - with mock.patch( - "octue.utils.metadata.load_local_metadata_file", - return_value={"delivered_questions": {question_uuid}}, - ): - with mock.patch("octue.utils.metadata.overwrite_local_metadata_file"): - with flask_app.app.test_client() as client: - with mock.patch( - "octue.cloud.deployment.google.cloud_run.flask_app.answer_question" - ) as mock_answer_question: + multi_patcher = MultiPatcher( + patches=[ + patch("octue.configuration.ServiceConfiguration.from_file", return_value=mock_configuration), + patch("octue.cloud.deployment.google.cloud_run.flask_app.get_events", side_effect=NotFound("blah")), + ] + ) + with flask_app.app.test_client() as client: + with patch("octue.cloud.deployment.google.cloud_run.flask_app.answer_question") as mock_answer_question: + with multi_patcher: + with self.assertLogs(level=logging.WARNING) as logging_context: response = client.post( "/", json={ + "deliveryAttempt": 1, "subscription": "projects/my-project/subscriptions/my-subscription", "message": { "data": {}, - "attributes": {"question_uuid": question_uuid, "forward_logs": "1"}, + "attributes": { + "question_uuid": str(uuid.uuid4()), + "forward_logs": "1", + "retry_count": "0", + }, }, }, ) + self.assertTrue( + logging_context.output[0].endswith( + "Cannot check if question has been redelivered as no event store table was found with the ID " + "'nonexistent.table'; check that the 'event_store_table_id' key in the service configuration " + "(`octue.yaml` file) is correct." 
+ ) + ) + self.assertEqual(response.status_code, 204) - mock_answer_question.assert_not_called() + mock_answer_question.assert_called_once() - def test_set_of_delivered_questions_is_created_and_stored_when_local_metadata_file_did_not_previously_exist(self): - """Test that the set of delivered questions is created and stored in the local metadata when the local metadata - file didn't previously exist. - """ + def test_new_question(self): + """Test that a new question is checked against the event store and allowed to proceed to analysis.""" + multi_patcher = MultiPatcher( + patches=[ + patch("octue.configuration.ServiceConfiguration.from_file", return_value=MOCK_CONFIGURATION), + patch("octue.cloud.deployment.google.cloud_run.flask_app.get_events", return_value=[]), + ] + ) + + with flask_app.app.test_client() as client: + with patch("octue.cloud.deployment.google.cloud_run.flask_app.answer_question") as mock_answer_question: + with multi_patcher: + with self.assertLogs() as logging_context: + response = client.post( + "/", + json={ + "deliveryAttempt": 1, + "subscription": "projects/my-project/subscriptions/my-subscription", + "message": { + "data": {}, + "attributes": { + "question_uuid": str(uuid.uuid4()), + "forward_logs": "1", + "retry_count": "0", + }, + }, + }, + ) + + self.assertTrue(logging_context.output[0].endswith("is a new question.")) + self.assertEqual(response.status_code, 204) + mock_answer_question.assert_called_once() + + def test_redelivered_questions_are_acknowledged_and_dropped(self): + """Test that questions undesirably redelivered by Pub/Sub are acknowledged and dropped.""" question_uuid = "fcd7aad7-dbf0-47d2-8984-220d493df2c1" - local_metadata = {} - with mock.patch("octue.utils.metadata.load_local_metadata_file", return_value=local_metadata): - with mock.patch("octue.utils.metadata.overwrite_local_metadata_file"): - with flask_app.app.test_client() as client: - with mock.patch( - "octue.cloud.deployment.google.cloud_run.flask_app.answer_question" - ) as mock_answer_question: + multi_patcher = MultiPatcher( + patches=[ + patch("octue.configuration.ServiceConfiguration.from_file", return_value=MOCK_CONFIGURATION), + patch( + "octue.cloud.deployment.google.cloud_run.flask_app.get_events", + return_value=[{"attributes": {"retry_count": "0"}}], + ), + ] + ) + with flask_app.app.test_client() as client: + with patch("octue.cloud.deployment.google.cloud_run.flask_app.answer_question") as mock_answer_question: + with self.assertLogs(level=logging.WARNING) as logging_context: + with multi_patcher: response = client.post( "/", json={ "subscription": "projects/my-project/subscriptions/my-subscription", "message": { "data": {}, - "attributes": {"question_uuid": question_uuid, "forward_logs": "1"}, + "attributes": { + "question_uuid": question_uuid, + "forward_logs": "1", + "retry_count": "0", + }, }, }, ) + self.assertIn( + "has already been received by the service. 
It will now be acknowledged and dropped to prevent further " + "redundant redelivery.", + logging_context.output[0], + ) + + self.assertEqual(response.status_code, 204) + mock_answer_question.assert_not_called() + + def test_retried_questions_are_allowed(self): + """Test that questions explicitly retried by the SDK are allowed to proceed to analysis.""" + question_uuid = "fcd7aad7-dbf0-47d2-8984-220d493df2c1" + + multi_patcher = MultiPatcher( + patches=[ + patch("octue.configuration.ServiceConfiguration.from_file", return_value=MOCK_CONFIGURATION), + patch( + "octue.cloud.deployment.google.cloud_run.flask_app.get_events", + return_value=[{"attributes": {"retry_count": "0"}}], + ), + ] + ) + + with flask_app.app.test_client() as client: + with patch("octue.cloud.deployment.google.cloud_run.flask_app.answer_question") as mock_answer_question: + with multi_patcher: + response = client.post( + "/", + json={ + "subscription": "projects/my-project/subscriptions/my-subscription", + "message": { + "data": {}, + "attributes": { + "question_uuid": question_uuid, + "forward_logs": "1", + "retry_count": "1", + }, + }, + }, + ) + self.assertEqual(response.status_code, 204) mock_answer_question.assert_called_once() - self.assertEqual(local_metadata, {"delivered_questions": {question_uuid}}) diff --git a/tests/cloud/deployment/google/test_answer_pub_sub_question.py b/tests/cloud/deployment/google/test_answer_pub_sub_question.py index f9a9d4e33..37513b674 100644 --- a/tests/cloud/deployment/google/test_answer_pub_sub_question.py +++ b/tests/cloud/deployment/google/test_answer_pub_sub_question.py @@ -33,7 +33,17 @@ def test_with_no_app_configuration_file(self): "octue.cloud.deployment.google.answer_pub_sub_question.Runner.from_configuration" ) as mock_constructor: answer_question( - question={"data": {}, "attributes": {"question_uuid": "8c859f87-b594-4297-883f-cd1c7718ef29"}}, + question={ + "data": {}, + "attributes": { + "question_uuid": "8c859f87-b594-4297-883f-cd1c7718ef29", + "parent_question_uuid": "8c859f87-b594-4297-883f-cd1c7718ef29", + "originator_question_uuid": "8c859f87-b594-4297-883f-cd1c7718ef29", + "parent": "some/originator:service", + "originator": "some/originator:service", + "retry_count": 0, + }, + }, project_name="a-project-name", ) @@ -89,7 +99,14 @@ class MockOpenForConfigurationFiles(MockOpen): answer_question( question={ "data": {}, - "attributes": {"question_uuid": "8c859f87-b594-4297-883f-cd1c7718ef29"}, + "attributes": { + "question_uuid": "8c859f87-b594-4297-883f-cd1c7718ef29", + "parent_question_uuid": "8c859f87-b594-4297-883f-cd1c7718ef29", + "originator_question_uuid": "8c859f87-b594-4297-883f-cd1c7718ef29", + "parent": "some/originator:service", + "originator": "some/originator:service", + "retry_count": 0, + }, }, project_name="a-project-name", ) diff --git a/tests/cloud/emulators/test_child_emulator.py b/tests/cloud/emulators/test_child_emulator.py index 22e334176..40a04a300 100644 --- a/tests/cloud/emulators/test_child_emulator.py +++ b/tests/cloud/emulators/test_child_emulator.py @@ -239,13 +239,7 @@ def test_ask_with_monitor_message(self): def test_heartbeat_events_are_ignored(self): """Test that heartbeat events are ignored by the emulator.""" - events = [ - { - "kind": "heartbeat", - "datetime": "2023-11-23T14:25:38.142884", - }, - ] - + events = [{"kind": "heartbeat"}] child_emulator = ChildEmulator(backend=self.BACKEND, events=events) with self.assertLogs(level=logging.WARNING) as logging_context: @@ -272,7 +266,7 @@ def error_run_function(*args, **kwargs): 
subscription, _ = parent.ask(service_id=child.id, input_values={}) parent.wait_for_answer(subscription=subscription) - child_emulator = ChildEmulator(events=parent.received_events) + child_emulator = ChildEmulator(events=[event["event"] for event in parent.received_events]) with self.assertRaises(OSError): child_emulator.ask(input_values={}) @@ -295,7 +289,7 @@ def test_ask_more_than_one_question(self): class TestChildEmulatorJSONFiles(BaseTestCase): - TEST_FILES_DIRECTORY = os.path.join(TESTS_DIR, "cloud", "emulators", "valid_child_emulator_files") + TEST_FILES_DIRECTORY = os.path.join(TESTS_DIR, "data", "valid_child_emulator_files") def test_with_empty_file(self): """Test that a child emulator can be instantiated from an empty JSON file (a JSON file with only an empty diff --git a/tests/cloud/events/test_replayer.py b/tests/cloud/events/test_replayer.py index 99e073dd6..3d41ad3c0 100644 --- a/tests/cloud/events/test_replayer.py +++ b/tests/cloud/events/test_replayer.py @@ -10,11 +10,8 @@ class TestEventReplayer(unittest.TestCase): def test_with_no_events(self): """Test that `None` is returned if no events are passed in.""" - with self.assertLogs(level=logging.DEBUG) as logging_context: - result = EventReplayer().handle_events(events=[]) - + result = EventReplayer().handle_events(events=[]) self.assertIsNone(result) - self.assertIn("No events (or no valid events) were received.", logging_context.output[0]) def test_with_no_valid_events(self): """Test that `None` is returned if no valid events are received.""" @@ -23,20 +20,19 @@ def test_with_no_valid_events(self): self.assertIsNone(result) self.assertIn("received an event that doesn't conform", logging_context.output[1]) - self.assertIn("No events (or no valid events) were received.", logging_context.output[2]) def test_no_result_event(self): """Test that `None` is returned if no result event is received.""" event = { - "event": { - "datetime": "2024-03-06T15:44:18.156044", - "kind": "delivery_acknowledgement", - }, + "event": {"kind": "delivery_acknowledgement"}, "attributes": { "datetime": "2024-04-11T10:46:48.236064", "uuid": "a9de11b1-e88f-43fa-b3a4-40a590c3443f", - "order": "0", + "retry_count": 0, "question_uuid": "d45c7e99-d610-413b-8130-dd6eef46dda6", + "parent_question_uuid": "5776ad74-52a6-46f7-a526-90421d91b8b2", + "originator_question_uuid": "86dc55b2-4282-42bd-92d0-bd4991ae7356", + "parent": "octue/test-service:1.0.0", "originator": "octue/test-service:1.0.0", "sender": "octue/test-service:1.0.0", "sender_type": "CHILD", @@ -79,3 +75,16 @@ def test_with_events_including_result_event(self): }, }, ) + + def test_without_service_metadata_in_logs(self): + """Test that log messages are formatted to not include the service metadata if + `include_service_metadata_in_logs=False`. 
+ """ + with open(os.path.join(TESTS_DIR, "data", "events.json")) as f: + events = json.load(f) + + with self.assertLogs() as logging_context: + EventReplayer(include_service_metadata_in_logs=False).handle_events(events) + + for log_message in logging_context.output: + self.assertNotIn("[octue/test-service:1.0.0 | d45c7e99-d610-413b-8130-dd6eef46dda6]", log_message) diff --git a/tests/cloud/pub_sub/test_bigquery.py b/tests/cloud/pub_sub/test_bigquery.py index 0057dd420..ce2248244 100644 --- a/tests/cloud/pub_sub/test_bigquery.py +++ b/tests/cloud/pub_sub/test_bigquery.py @@ -2,7 +2,6 @@ from unittest.mock import MagicMock, patch from octue.cloud.pub_sub.bigquery import get_events -from octue.exceptions import ServiceNotFound class MockEmptyResult: @@ -20,59 +19,127 @@ def query(self, *args, **kwargs): class TestGetEvents(TestCase): + def test_error_raised_if_no_question_uuid_type_provided(self): + """Test that an error is raised if none of `question_uuid`, `parent_question_uuid`, and + `originator_question_uuid` are provided. + """ + with self.assertRaises(ValueError): + get_events(table_id="blah") + + def test_error_if_more_than_one_question_uuid_type_provided(self): + """Test that an error is raised if more that one of `question_uuid`, `parent_question_uuid`, and + `originator_question_uuid` are provided. + """ + for kwargs in ( + {"question_uuid": "a", "parent_question_uuid": "b"}, + {"question_uuid": "a", "originator_question_uuid": "b"}, + {"parent_question_uuid": "a", "originator_question_uuid": "b"}, + ): + with self.subTest(kwargs=kwargs): + with self.assertRaises(ValueError): + get_events(table_id="blah", **kwargs) + def test_error_raised_if_event_kind_invalid(self): """Test that an error is raised if the event kind is invalid.""" with self.assertRaises(ValueError): - get_events( - table_id="blah", - sender="octue/test-service:1.0.0", - question_uuid="blah", - kind="frisbee_tournament", - ) - - def test_error_raised_if_no_events_found(self): - """Test that an error is raised if no events are found.""" + get_events(table_id="blah", question_uuid="blah", kind="frisbee_tournament") + + def test_no_events_found(self): + """Test that an empty list is returned if no events are found for the question UUID.""" with patch("octue.cloud.pub_sub.bigquery.Client", MockEmptyBigQueryClient): - with self.assertRaises(ServiceNotFound): - get_events(table_id="blah", sender="octue/test-service:1.0.0", question_uuid="blah") + events = get_events(table_id="blah", question_uuid="blah") + + self.assertEqual(events, []) + + def test_without_tail(self): + """Test the non-tail query.""" + with patch("octue.cloud.pub_sub.bigquery.Client") as mock_client: + get_events(table_id="blah", question_uuid="blah", tail=False) + + self.assertEqual( + mock_client.mock_calls[1].args[0], + "SELECT `originator_question_uuid`, `parent_question_uuid`, `question_uuid`, `kind`, `event`, `datetime`, " + "`uuid`, `originator`, `parent`, `sender`, `sender_type`, `sender_sdk_version`, `recipient`, " + "`other_attributes` FROM `blah`\nWHERE question_uuid=@relevant_question_uuid\n" + "ORDER BY `datetime` ASC\n" + "LIMIT @limit", + ) def test_without_kind(self): """Test the query used to retrieve events of all kinds.""" with patch("octue.cloud.pub_sub.bigquery.Client") as mock_client: - get_events(table_id="blah", sender="octue/test-service:1.0.0", question_uuid="blah") + get_events(table_id="blah", question_uuid="blah") self.assertEqual( mock_client.mock_calls[1].args[0], - "SELECT `event`, `kind`, `datetime`, `uuid`, 
`originator`, `sender`, `sender_type`, `sender_sdk_version`, " - "`recipient`, `order`, `other_attributes` FROM `blah`\nWHERE sender=@sender\n" - "AND question_uuid=@question_uuid\nORDER BY `order`\nLIMIT @limit", + "SELECT * FROM (\n" + "SELECT `originator_question_uuid`, `parent_question_uuid`, `question_uuid`, `kind`, `event`, `datetime`, " + "`uuid`, `originator`, `parent`, `sender`, `sender_type`, `sender_sdk_version`, `recipient`, " + "`other_attributes` FROM `blah`\nWHERE question_uuid=@relevant_question_uuid\n" + "ORDER BY `datetime` DESC\n" + "LIMIT @limit\n" + ") ORDER BY `datetime` ASC", ) def test_with_kind(self): """Test the query used to retrieve events of a specific kind.""" with patch("octue.cloud.pub_sub.bigquery.Client") as mock_client: - get_events(table_id="blah", sender="octue/test-service:1.0.0", question_uuid="blah", kind="result") + get_events(table_id="blah", question_uuid="blah", kind="result") self.assertEqual( mock_client.mock_calls[1].args[0], - "SELECT `event`, `kind`, `datetime`, `uuid`, `originator`, `sender`, `sender_type`, `sender_sdk_version`, " - "`recipient`, `order`, `other_attributes` FROM `blah`\nWHERE sender=@sender\n" - "AND question_uuid=@question_uuid\nAND kind='result'\nORDER BY `order`\nLIMIT @limit", + "SELECT * FROM (\n" + "SELECT `originator_question_uuid`, `parent_question_uuid`, `question_uuid`, `kind`, `event`, `datetime`, " + "`uuid`, `originator`, `parent`, `sender`, `sender_type`, `sender_sdk_version`, `recipient`, " + "`other_attributes` FROM `blah`\nWHERE question_uuid=@relevant_question_uuid\nAND kind='result'\n" + "ORDER BY `datetime` DESC\n" + "LIMIT @limit\n" + ") ORDER BY `datetime` ASC", ) def test_with_backend_metadata(self): """Test the query used to retrieve backend metadata in addition to events.""" with patch("octue.cloud.pub_sub.bigquery.Client") as mock_client: - get_events( - table_id="blah", - sender="octue/test-service:1.0.0", - question_uuid="blah", - include_backend_metadata=True, - ) + get_events(table_id="blah", question_uuid="blah", include_backend_metadata=True) + + self.assertEqual( + mock_client.mock_calls[1].args[0], + "SELECT * FROM (\n" + "SELECT `originator_question_uuid`, `parent_question_uuid`, `question_uuid`, `kind`, `event`, `datetime`, " + "`uuid`, `originator`, `parent`, `sender`, `sender_type`, `sender_sdk_version`, `recipient`, " + "`other_attributes`, `backend`, `backend_metadata` FROM `blah`\n" + "WHERE question_uuid=@relevant_question_uuid\n" + "ORDER BY `datetime` DESC\n" + "LIMIT @limit\n" + ") ORDER BY `datetime` ASC", + ) + + def test_with_parent_question_uuid(self): + """Test the query used to retrieve events for a given parent question UUID.""" + with patch("octue.cloud.pub_sub.bigquery.Client") as mock_client: + get_events(table_id="blah", parent_question_uuid="blah") + + self.assertEqual( + mock_client.mock_calls[1].args[0], + "SELECT * FROM (\n" + "SELECT `originator_question_uuid`, `parent_question_uuid`, `question_uuid`, `kind`, `event`, `datetime`, " + "`uuid`, `originator`, `parent`, `sender`, `sender_type`, `sender_sdk_version`, `recipient`, " + "`other_attributes` FROM `blah`\nWHERE parent_question_uuid=@relevant_question_uuid\n" + "ORDER BY `datetime` DESC\n" + "LIMIT @limit\n" + ") ORDER BY `datetime` ASC", + ) + + def test_with_originator_question_uuid(self): + """Test the query used to retrieve events for a given originator question UUID.""" + with patch("octue.cloud.pub_sub.bigquery.Client") as mock_client: + get_events(table_id="blah", originator_question_uuid="blah") self.assertEqual( mock_client.mock_calls[1].args[0], - "SELECT `event`, `kind`, `datetime`, `uuid`, `originator`, `sender`, `sender_type`, `sender_sdk_version`, " - 
"`recipient`, `order`, `other_attributes`, `backend`, `backend_metadata` FROM `blah`\n" - "WHERE sender=@sender\nAND question_uuid=@question_uuid\nORDER BY `order`\nLIMIT @limit", + "SELECT * FROM (\n" + "SELECT `originator_question_uuid`, `parent_question_uuid`, `question_uuid`, `kind`, `event`, `datetime`, " + "`uuid`, `originator`, `parent`, `sender`, `sender_type`, `sender_sdk_version`, `recipient`, " + "`other_attributes` FROM `blah`\nWHERE originator_question_uuid=@relevant_question_uuid\n" + "ORDER BY `datetime` DESC\n" + "LIMIT @limit\n" + ") ORDER BY `datetime` ASC", ) diff --git a/tests/cloud/pub_sub/test_events.py b/tests/cloud/pub_sub/test_events.py index e7b32ccb7..b388ee6fc 100644 --- a/tests/cloud/pub_sub/test_events.py +++ b/tests/cloud/pub_sub/test_events.py @@ -1,5 +1,4 @@ import datetime -import math import uuid from unittest.mock import patch @@ -39,169 +38,66 @@ def tearDownClass(cls): cls.service_patcher.stop() def test_timeout(self): - """Test that a TimeoutError is raised if message handling takes longer than the given timeout.""" + """Test that a TimeoutError is raised if event handling takes longer than the given timeout.""" event_handler = GoogleCloudPubSubEventHandler( subscription=self.subscription, - recipient=self.parent, - event_handlers={"test": lambda message: None, "finish-test": lambda message: message}, + event_handlers={"test": lambda event, attributes: None, "finish-test": lambda event, attributes: event}, schema={}, ) with self.assertRaises(TimeoutError): event_handler.handle_events(timeout=0) - def test_in_order_messages_are_handled_in_order(self): - """Test that messages received in order are handled in order.""" + def test_handle_events(self): + """Test events can be handled.""" event_handler = GoogleCloudPubSubEventHandler( subscription=self.subscription, - recipient=self.parent, - event_handlers={"test": lambda message: None, "finish-test": lambda message: "This is the result."}, - schema={}, - ) - - child = MockService(backend=GCPPubSubBackend(project_name=TEST_PROJECT_NAME)) - - messages = [ - { - "event": {"kind": "test", "order": 0}, - "attributes": {"question_uuid": self.question_uuid, "sender_type": "CHILD"}, - }, - { - "event": {"kind": "test", "order": 1}, - "attributes": {"question_uuid": self.question_uuid, "sender_type": "CHILD"}, - }, - { - "event": {"kind": "test", "order": 2}, - "attributes": {"question_uuid": self.question_uuid, "sender_type": "CHILD"}, - }, - { - "event": {"kind": "finish-test", "order": 3}, - "attributes": {"question_uuid": self.question_uuid, "sender_type": "CHILD"}, + event_handlers={ + "test": lambda event, attributes: None, + "finish-test": lambda event, attributes: "This is the result.", }, - ] - - for message in messages: - child._emit_event( - event=message["event"], - attributes=message["attributes"], - originator=self.parent.id, - recipient=self.parent.id, - order=message["event"]["order"], - ) - - result = event_handler.handle_events() - self.assertEqual(result, "This is the result.") - - self.assertEqual( - event_handler.handled_events, - [ - {"kind": "test", "order": 0}, - {"kind": "test", "order": 1}, - {"kind": "test", "order": 2}, - {"kind": "finish-test", "order": 3}, - ], - ) - - def test_out_of_order_messages_are_handled_in_order(self): - """Test that messages received out of order are handled in order.""" - event_handler = GoogleCloudPubSubEventHandler( - subscription=self.subscription, - recipient=self.parent, - event_handlers={"test": lambda message: None, "finish-test": lambda message: 
"This is the result."}, schema={}, ) child = MockService(backend=GCPPubSubBackend(project_name=TEST_PROJECT_NAME)) - messages = [ - { - "event": {"kind": "test", "order": 1}, - "attributes": {"question_uuid": self.question_uuid, "sender_type": "CHILD"}, - }, - { - "event": {"kind": "test", "order": 2}, - "attributes": {"question_uuid": self.question_uuid, "sender_type": "CHILD"}, - }, + events = [ { "event": {"kind": "test", "order": 0}, - "attributes": {"question_uuid": self.question_uuid, "sender_type": "CHILD"}, - }, - { - "event": {"kind": "finish-test", "order": 3}, - "attributes": {"question_uuid": self.question_uuid, "sender_type": "CHILD"}, - }, - ] - - for message in messages: - child._emit_event( - event=message["event"], - attributes=message["attributes"], - originator=self.parent.id, - recipient=self.parent.id, - order=message["event"]["order"], - ) - - result = event_handler.handle_events() - - self.assertEqual(result, "This is the result.") - - self.assertEqual( - event_handler.handled_events, - [ - {"kind": "test", "order": 0}, - {"kind": "test", "order": 1}, - {"kind": "test", "order": 2}, - {"kind": "finish-test", "order": 3}, - ], - ) - - def test_out_of_order_messages_with_end_message_first_are_handled_in_order(self): - """Test that messages received out of order and with the final message (the message that triggers a value to be - returned) are handled in order. - """ - event_handler = GoogleCloudPubSubEventHandler( - subscription=self.subscription, - recipient=self.parent, - event_handlers={"test": lambda message: None, "finish-test": lambda message: "This is the result."}, - schema={}, - ) - - child = MockService(backend=GCPPubSubBackend(project_name=TEST_PROJECT_NAME)) - - messages = [ - { - "event": {"kind": "finish-test", "order": 3}, - "attributes": {"question_uuid": self.question_uuid, "sender_type": "CHILD"}, + "attributes": {"sender_type": "CHILD"}, }, { "event": {"kind": "test", "order": 1}, - "attributes": {"question_uuid": self.question_uuid, "sender_type": "CHILD"}, + "attributes": {"sender_type": "CHILD"}, }, { "event": {"kind": "test", "order": 2}, - "attributes": {"question_uuid": self.question_uuid, "sender_type": "CHILD"}, + "attributes": {"sender_type": "CHILD"}, }, { - "event": {"kind": "test", "order": 0}, - "attributes": {"question_uuid": self.question_uuid, "sender_type": "CHILD"}, + "event": {"kind": "finish-test", "order": 3}, + "attributes": {"sender_type": "CHILD"}, }, ] - for message in messages: + for event in events: child._emit_event( - event=message["event"], - attributes=message["attributes"], + event=event["event"], + question_uuid=self.question_uuid, + parent_question_uuid=None, + originator_question_uuid=self.question_uuid, + attributes=event["attributes"], + parent=self.parent.id, originator=self.parent.id, recipient=self.parent.id, - order=message["event"]["order"], + retry_count=0, ) result = event_handler.handle_events() - self.assertEqual(result, "This is the result.") self.assertEqual( - event_handler.handled_events, + [event["event"] for event in event_handler.handled_events], [ {"kind": "test", "order": 0}, {"kind": "test", "order": 1}, @@ -211,75 +107,81 @@ def test_out_of_order_messages_with_end_message_first_are_handled_in_order(self) ) def test_no_timeout(self): - """Test that message handling works with no timeout.""" + """Test that event handling works with no timeout.""" event_handler = GoogleCloudPubSubEventHandler( subscription=self.subscription, - recipient=self.parent, - event_handlers={"test": lambda message: None, 
"finish-test": lambda message: "This is the result."}, + event_handlers={ + "test": lambda event, attributes: None, + "finish-test": lambda event, attributes: "This is the result.", + }, schema={}, ) child = MockService(backend=GCPPubSubBackend(project_name=TEST_PROJECT_NAME)) - messages = [ + events = [ { "event": {"kind": "test", "order": 0}, - "attributes": {"question_uuid": self.question_uuid, "sender_type": "CHILD"}, + "attributes": {"sender_type": "CHILD"}, }, { "event": {"kind": "test", "order": 1}, - "attributes": {"question_uuid": self.question_uuid, "sender_type": "CHILD"}, + "attributes": {"sender_type": "CHILD"}, }, { "event": {"kind": "finish-test", "order": 2}, - "attributes": {"question_uuid": self.question_uuid, "sender_type": "CHILD"}, + "attributes": {"sender_type": "CHILD"}, }, ] - for message in messages: + for event in events: child._emit_event( - event=message["event"], - attributes=message["attributes"], + event=event["event"], + question_uuid=self.question_uuid, + parent_question_uuid=None, + originator_question_uuid=self.question_uuid, + attributes=event["attributes"], + parent=self.parent.id, originator=self.parent.id, recipient=self.parent.id, - order=message["event"]["order"], + retry_count=0, ) result = event_handler.handle_events(timeout=None) self.assertEqual(result, "This is the result.") self.assertEqual( - event_handler.handled_events, + [event["event"] for event in event_handler.handled_events], [{"kind": "test", "order": 0}, {"kind": "test", "order": 1}, {"kind": "finish-test", "order": 2}], ) def test_delivery_acknowledgement(self): - """Test that a delivery acknowledgement message is handled correctly.""" - event_handler = GoogleCloudPubSubEventHandler(subscription=self.subscription, recipient=self.parent) + """Test that a delivery acknowledgement event is handled correctly.""" + event_handler = GoogleCloudPubSubEventHandler(subscription=self.subscription) child = MockService(backend=GCPPubSubBackend(project_name=TEST_PROJECT_NAME)) - messages = [ + events = [ { - "event": { - "kind": "delivery_acknowledgement", - "datetime": datetime.datetime.utcnow().isoformat(), - "order": 0, - }, - "attributes": {"question_uuid": self.question_uuid, "sender_type": "CHILD"}, + "event": {"kind": "delivery_acknowledgement", "order": 0}, + "attributes": {"sender_type": "CHILD"}, }, { "event": {"kind": "result", "order": 1}, - "attributes": {"question_uuid": self.question_uuid, "sender_type": "CHILD"}, + "attributes": {"sender_type": "CHILD"}, }, ] - for message in messages: + for event in events: child._emit_event( - event=message["event"], - attributes=message["attributes"], + event=event["event"], + question_uuid=self.question_uuid, + parent_question_uuid=None, + originator_question_uuid=self.question_uuid, + attributes=event["attributes"], + parent=self.parent.id, originator=self.parent.id, recipient=self.parent.id, - order=message["event"]["order"], + retry_count=0, ) result = event_handler.handle_events() @@ -287,7 +189,7 @@ def test_delivery_acknowledgement(self): def test_error_raised_if_heartbeat_not_received_before_checked(self): """Test that an error is raised if a heartbeat isn't received before a heartbeat is first checked for.""" - event_handler = GoogleCloudPubSubEventHandler(subscription=self.subscription, recipient=self.parent) + event_handler = GoogleCloudPubSubEventHandler(subscription=self.subscription) with self.assertRaises(TimeoutError) as error: event_handler.handle_events(maximum_heartbeat_interval=0) @@ -297,7 +199,7 @@ def 
test_error_raised_if_heartbeat_not_received_before_checked(self): def test_error_raised_if_heartbeats_stop_being_received(self): """Test that an error is raised if heartbeats stop being received within the maximum interval.""" - event_handler = GoogleCloudPubSubEventHandler(subscription=self.subscription, recipient=self.parent) + event_handler = GoogleCloudPubSubEventHandler(subscription=self.subscription) event_handler._last_heartbeat = datetime.datetime.now() - datetime.timedelta(seconds=30) with self.assertRaises(TimeoutError) as error: @@ -307,32 +209,32 @@ def test_error_raised_if_heartbeats_stop_being_received(self): def test_error_not_raised_if_heartbeat_has_been_received_in_maximum_allowed_interval(self): """Test that an error is not raised if a heartbeat has been received in the maximum allowed interval.""" - event_handler = GoogleCloudPubSubEventHandler(subscription=self.subscription, recipient=self.parent) + event_handler = GoogleCloudPubSubEventHandler(subscription=self.subscription) child = MockService(backend=GCPPubSubBackend(project_name=TEST_PROJECT_NAME)) event_handler._last_heartbeat = datetime.datetime.now() - messages = [ + events = [ { - "event": { - "kind": "delivery_acknowledgement", - "datetime": datetime.datetime.utcnow().isoformat(), - "order": 0, - }, - "attributes": {"question_uuid": self.question_uuid, "sender_type": "CHILD"}, + "event": {"kind": "delivery_acknowledgement", "order": 0}, + "attributes": {"sender_type": "CHILD"}, }, { "event": {"kind": "result", "order": 1}, - "attributes": {"question_uuid": self.question_uuid, "sender_type": "CHILD"}, + "attributes": {"sender_type": "CHILD"}, }, ] - for message in messages: + for event in events: child._emit_event( - event=message["event"], - attributes=message["attributes"], + event=event["event"], + question_uuid=self.question_uuid, + parent_question_uuid=None, + originator_question_uuid=self.question_uuid, + attributes=event["attributes"], + parent=self.parent.id, originator=self.parent.id, recipient=self.parent.id, - order=message["event"]["order"], + retry_count=0, ) with patch( @@ -343,293 +245,18 @@ def test_error_not_raised_if_heartbeat_has_been_received_in_maximum_allowed_inte def test_time_since_last_heartbeat_is_none_if_no_heartbeat_received_yet(self): """Test that the time since the last heartbeat is `None` if no heartbeat has been received yet.""" - event_handler = GoogleCloudPubSubEventHandler(subscription=self.subscription, recipient=self.parent) + event_handler = GoogleCloudPubSubEventHandler(subscription=self.subscription) self.assertIsNone(event_handler._time_since_last_heartbeat) def test_total_run_time_is_none_if_handle_events_has_not_been_called(self): - """Test that the total run time for the message handler is `None` if the `handle_events` method has not been + """Test that the total run time for the event handler is `None` if the `handle_events` method has not been called. 
""" - event_handler = GoogleCloudPubSubEventHandler(subscription=self.subscription, recipient=self.parent) + event_handler = GoogleCloudPubSubEventHandler(subscription=self.subscription) self.assertIsNone(event_handler.total_run_time) - def test_time_since_missing_message_is_none_if_no_unhandled_missing_messages(self): - """Test that the `time_since_missing_message` property is `None` if there are no unhandled missing messages.""" - event_handler = GoogleCloudPubSubEventHandler(subscription=self.subscription, recipient=self.parent) - self.assertIsNone(event_handler.time_since_missing_event) - - def test_missing_messages_at_start_can_be_skipped(self): - """Test that missing messages at the start of the event stream can be skipped if they aren't received after a - given time period if subsequent messages have been received. - """ - event_handler = GoogleCloudPubSubEventHandler( - subscription=self.subscription, - recipient=self.parent, - event_handlers={"test": lambda message: None, "finish-test": lambda message: "This is the result."}, - schema={}, - skip_missing_events_after=0, - ) - - child = MockService(backend=GCPPubSubBackend(project_name=TEST_PROJECT_NAME)) - - # Simulate the first two messages not being received. - messages = [ - { - "event": {"kind": "test", "order": 2}, - "attributes": {"question_uuid": self.question_uuid, "sender_type": "CHILD"}, - }, - { - "event": {"kind": "test", "order": 3}, - "attributes": {"question_uuid": self.question_uuid, "sender_type": "CHILD"}, - }, - { - "event": {"kind": "test", "order": 4}, - "attributes": {"question_uuid": self.question_uuid, "sender_type": "CHILD"}, - }, - { - "event": {"kind": "finish-test", "order": 5}, - "attributes": {"question_uuid": self.question_uuid, "sender_type": "CHILD"}, - }, - ] - - for message in messages: - child._emit_event( - event=message["event"], - attributes=message["attributes"], - originator=self.parent.id, - recipient=self.parent.id, - order=message["event"]["order"], - ) - with self.assertLogs() as logging_context: - result = event_handler.handle_events() - - self.assertIn( - f"2 consecutive events missing for question {self.question_uuid!r} after 0s - skipping to next earliest " - f"waiting event (event 2).", - logging_context.output[0], - ) - - self.assertEqual(result, "This is the result.") - self.assertEqual( - event_handler.handled_events, - [ - {"kind": "test", "order": 2}, - {"kind": "test", "order": 3}, - {"kind": "test", "order": 4}, - {"kind": "finish-test", "order": 5}, - ], - ) - - def test_missing_messages_in_middle_can_skipped(self): - """Test that missing messages in the middle of the event stream can be skipped.""" - event_handler = GoogleCloudPubSubEventHandler( - subscription=self.subscription, - recipient=self.parent, - event_handlers={"test": lambda message: None, "finish-test": lambda message: "This is the result."}, - schema={}, - skip_missing_events_after=0, - ) - - child = MockService(backend=GCPPubSubBackend(project_name=TEST_PROJECT_NAME)) - - # Send three consecutive messages. 
- messages = [ - { - "event": {"kind": "test", "order": 0}, - "attributes": {"question_uuid": self.question_uuid, "sender_type": "CHILD"}, - }, - { - "event": {"kind": "test", "order": 1}, - "attributes": {"question_uuid": self.question_uuid, "sender_type": "CHILD"}, - }, - { - "event": {"kind": "test", "order": 2}, - "attributes": {"question_uuid": self.question_uuid, "sender_type": "CHILD"}, - }, - ] - - for message in messages: - child._emit_event( - event=message["event"], - attributes=message["attributes"], - originator=self.parent.id, - recipient=self.parent.id, - order=message["event"]["order"], - ) - - # Send a final message. - child._emit_event( - event={"kind": "finish-test", "order": 5}, - attributes={"question_uuid": self.question_uuid, "sender_type": "CHILD"}, - originator=self.parent.id, - recipient=self.parent.id, - # Simulate missing messages. - order=5, - ) - - with self.assertLogs() as logging_context: - event_handler.handle_events() - - self.assertIn( - f"2 consecutive events missing for question {self.question_uuid!r} after 0s - skipping to next earliest " - f"waiting event (event 5).", - logging_context.output[0], - ) - - # Check that all the non-missing messages were handled. - self.assertEqual( - event_handler.handled_events, - [ - {"kind": "test", "order": 0}, - {"kind": "test", "order": 1}, - {"kind": "test", "order": 2}, - {"kind": "finish-test", "order": 5}, - ], - ) - - def test_multiple_blocks_of_missing_messages_in_middle_can_skipped(self): - """Test that multiple blocks of missing messages in the middle of the event stream can be skipped.""" - event_handler = GoogleCloudPubSubEventHandler( - subscription=self.subscription, - recipient=self.parent, - event_handlers={"test": lambda message: None, "finish-test": lambda message: "This is the result."}, - schema={}, - skip_missing_events_after=0, - ) - - child = MockService(backend=GCPPubSubBackend(project_name=TEST_PROJECT_NAME)) - - # Send three consecutive messages. - messages = [ - { - "event": {"kind": "test", "order": 0}, - "attributes": {"question_uuid": self.question_uuid, "sender_type": "CHILD"}, - }, - { - "event": {"kind": "test", "order": 1}, - "attributes": {"question_uuid": self.question_uuid, "sender_type": "CHILD"}, - }, - { - "event": {"kind": "test", "order": 2}, - "attributes": {"question_uuid": self.question_uuid, "sender_type": "CHILD"}, - }, - ] - - for message in messages: - child._emit_event( - event=message["event"], - attributes=message["attributes"], - originator=self.parent.id, - recipient=self.parent.id, - order=message["event"]["order"], - ) - - # Send another message. - child._emit_event( - event={"kind": "test", "order": 5}, - attributes={"order": 5, "question_uuid": self.question_uuid, "sender_type": "CHILD"}, - originator=self.parent.id, - recipient=self.parent.id, - # Simulate missing messages. - order=5, - ) - - # Send more consecutive messages. 
- messages = [ - { - "event": {"kind": "test", "order": 20}, - "attributes": {"question_uuid": self.question_uuid, "sender_type": "CHILD"}, - }, - { - "event": {"kind": "test", "order": 21}, - "attributes": {"question_uuid": self.question_uuid, "sender_type": "CHILD"}, - }, - { - "event": {"kind": "test", "order": 22}, - "attributes": {"question_uuid": self.question_uuid, "sender_type": "CHILD"}, - }, - { - "event": {"kind": "finish-test", "order": 23}, - "attributes": {"question_uuid": self.question_uuid, "sender_type": "CHILD"}, - }, - ] - - for message in messages: - child._emit_event( - event=message["event"], - attributes=message["attributes"], - originator=self.parent.id, - recipient=self.parent.id, - # Simulate more missing messages. - order=message["event"]["order"], - ) - - with self.assertLogs() as logging_context: - event_handler.handle_events() - - self.assertIn( - f"2 consecutive events missing for question {self.question_uuid!r} after 0s - skipping to next earliest " - f"waiting event (event 5).", - logging_context.output[0], - ) - - self.assertIn( - f"14 consecutive events missing for question {self.question_uuid!r} after 0s - skipping to next earliest " - f"waiting event (event 20).", - logging_context.output[1], - ) - - # Check that all the non-missing messages were handled. - self.assertEqual( - event_handler.handled_events, - [ - {"kind": "test", "order": 0}, - {"kind": "test", "order": 1}, - {"kind": "test", "order": 2}, - {"kind": "test", "order": 5}, - {"kind": "test", "order": 20}, - {"kind": "test", "order": 21}, - {"kind": "test", "order": 22}, - {"kind": "finish-test", "order": 23}, - ], - ) - - def test_all_messages_missing_apart_from_result(self): - """Test that the result message is still handled if all other messages are missing.""" - event_handler = GoogleCloudPubSubEventHandler( - subscription=self.subscription, - recipient=self.parent, - event_handlers={"test": lambda message: None, "finish-test": lambda message: "This is the result."}, - schema={}, - skip_missing_events_after=0, - ) - - child = MockService(backend=GCPPubSubBackend(project_name=TEST_PROJECT_NAME)) - - # Send the result message. - child._emit_event( - event={"kind": "finish-test", "order": 1000}, - attributes={"question_uuid": self.question_uuid, "sender_type": "CHILD"}, - originator=self.parent.id, - recipient=self.parent.id, - # Simulate missing messages. - order=1000, - ) - - with self.assertLogs() as logging_context: - event_handler.handle_events() - - self.assertIn( - f"1000 consecutive events missing for question {self.question_uuid!r} after 0s - skipping to next earliest " - f"waiting event (event 1000).", - logging_context.output[0], - ) - # Check that the result message was handled. 
- self.assertEqual(event_handler.handled_events, [{"kind": "finish-test", "order": 1000}]) - - -class TestPullAndEnqueueAvailableMessages(BaseTestCase): +class TestPullAvailableEvents(BaseTestCase): service_patcher = ServicePatcher() @classmethod @@ -656,30 +283,26 @@ def tearDownClass(cls): """ cls.service_patcher.stop() - def test_pull_and_enqueue_available_events(self): - """Test that pulling and enqueuing a message works.""" + def test_pull_available_events(self): + """Test that available events can be pulled.""" event_handler = GoogleCloudPubSubEventHandler( subscription=self.subscription, - recipient=self.parent, - event_handlers={"test": lambda message: None, "finish-test": lambda message: "This is the result."}, + event_handlers={ + "test": lambda event, attributes: None, + "finish-test": lambda event, attributes: "This is the result.", + }, schema={}, ) - event_handler.question_uuid = self.question_uuid - event_handler.child_sruid = "my-org/my-service:1.0.0" - event_handler.child_sdk_version = "0.1.3" - event_handler.waiting_events = {} - - # Enqueue a mock message for a mock subscription to receive. - mock_message = {"kind": "test"} + # Enqueue a mock event for a mock subscription to receive. + mock_event = {"kind": "test"} MESSAGES[self.question_uuid] = [ MockMessage.from_primitive( - mock_message, + mock_event, attributes={ - "order": 0, "question_uuid": self.question_uuid, - "originator": self.parent.id, + "parent": self.parent.id, "sender": self.parent.id, "sender_type": "CHILD", "sender_sdk_version": "0.50.0", @@ -688,22 +311,20 @@ def test_pull_and_enqueue_available_events(self): ) ] - event_handler._pull_and_enqueue_available_events(timeout=10) - self.assertEqual(event_handler.waiting_events, {0: mock_message}) + events = event_handler._pull_available_events(timeout=10) + self.assertEqual(events[0][0], mock_event) - def test_timeout_error_raised_if_result_message_not_received_in_time(self): - """Test that a timeout error is raised if a result message is not received in time.""" + def test_timeout_error_raised_if_result_event_not_received_in_time(self): + """Test that a timeout error is raised if a result event is not received in time.""" event_handler = GoogleCloudPubSubEventHandler( subscription=self.subscription, - recipient=self.parent, - event_handlers={"test": lambda message: None, "finish-test": lambda message: "This is the result."}, + event_handlers={ + "test": lambda event, attributes: None, + "finish-test": lambda event, attributes: "This is the result.", + }, ) - event_handler.child_sdk_version = "0.1.3" - event_handler.waiting_events = {} event_handler._start_time = 0 with self.assertRaises(TimeoutError): - event_handler._pull_and_enqueue_available_events(timeout=1e-6) - - self.assertEqual(event_handler._earliest_waiting_event_number, math.inf) + event_handler._pull_available_events(timeout=1e-6) diff --git a/tests/cloud/pub_sub/test_logging.py b/tests/cloud/pub_sub/test_logging.py index 77a8a76c2..000d9e42e 100644 --- a/tests/cloud/pub_sub/test_logging.py +++ b/tests/cloud/pub_sub/test_logging.py @@ -5,7 +5,6 @@ from octue.cloud.emulators._pub_sub import MESSAGES, MockService from octue.cloud.emulators.child import ServicePatcher -from octue.cloud.events.counter import EventCounter from octue.cloud.pub_sub.logging import GoogleCloudPubSubHandler from octue.resources.service_backends import GCPPubSubBackend from tests.base import BaseTestCase @@ -44,9 +43,12 @@ def test_emit(self): GoogleCloudPubSubHandler( event_emitter=service._emit_event, 
question_uuid=question_uuid, + parent_question_uuid=None, + originator_question_uuid=question_uuid, + parent="another/service:1.0.0", originator="another/service:1.0.0", recipient="another/service:1.0.0", - order=EventCounter(), + retry_count=0, ).emit(log_record) self.assertEqual( @@ -58,6 +60,7 @@ def test_emit_with_non_json_serialisable_args(self): """Test that non-JSON-serialisable arguments to log messages are converted to their string representation before being serialised and published to the Pub/Sub topic. """ + question_uuid = "96d69278-44ac-4631-aeea-c90fb08a1b2b" non_json_serialisable_thing = NonJSONSerialisable() # Check that it can't be serialised to JSON. @@ -73,10 +76,13 @@ def test_emit_with_non_json_serialisable_args(self): with patch("octue.cloud.emulators._pub_sub.MockPublisher.publish") as mock_publish: GoogleCloudPubSubHandler( event_emitter=service._emit_event, - question_uuid="question-uuid", + question_uuid=question_uuid, + parent_question_uuid=None, + originator_question_uuid=question_uuid, + parent="another/service:1.0.0", originator="another/service:1.0.0", recipient="another/service:1.0.0", - order=EventCounter(), + retry_count=0, ).emit(record) self.assertEqual( diff --git a/tests/cloud/pub_sub/test_service.py b/tests/cloud/pub_sub/test_service.py index 6900565a9..4ab5bff45 100644 --- a/tests/cloud/pub_sub/test_service.py +++ b/tests/cloud/pub_sub/test_service.py @@ -63,11 +63,6 @@ def test_repr(self): service = Service(backend=BACKEND) self.assertEqual(repr(service), f"") - def test_repr_with_name(self): - """Test that services are represented using their name if they have one.""" - service = Service(backend=BACKEND, name=f"octue/blah-service:{MOCK_SERVICE_REVISION_TAG}") - self.assertEqual(repr(service), f"") - def test_service_id_cannot_be_non_none_empty_value(self): """Ensure that a ValueError is raised if a non-None empty value is provided as the service_id.""" with self.assertRaises(ValueError): @@ -696,11 +691,11 @@ def test_child_messages_can_be_recorded_by_parent(self): parent.wait_for_answer(subscription) # Check that the child's messages have been recorded by the parent. - self.assertEqual(parent.received_events[0]["kind"], "delivery_acknowledgement") - self.assertEqual(parent.received_events[1]["kind"], "log_record") - self.assertEqual(parent.received_events[2]["kind"], "log_record") - self.assertEqual(parent.received_events[3]["kind"], "log_record") - self.assertEqual(parent.received_events[4], {"kind": "result", "output_values": "Hello! It worked!"}) + self.assertEqual(parent.received_events[0]["event"]["kind"], "delivery_acknowledgement") + self.assertEqual(parent.received_events[1]["event"]["kind"], "log_record") + self.assertEqual(parent.received_events[2]["event"]["kind"], "log_record") + self.assertEqual(parent.received_events[3]["event"]["kind"], "log_record") + self.assertEqual(parent.received_events[4]["event"], {"kind": "result", "output_values": "Hello! It worked!"}) def test_child_exception_message_can_be_recorded_by_parent(self): """Test that the parent can record exceptions raised by the child.""" @@ -713,9 +708,9 @@ def test_child_exception_message_can_be_recorded_by_parent(self): parent.wait_for_answer(subscription) # Check that the child's messages have been recorded by the parent. 
- self.assertEqual(parent.received_events[0]["kind"], "delivery_acknowledgement") - self.assertEqual(parent.received_events[1]["kind"], "exception") - self.assertIn("Oh no.", parent.received_events[1]["exception_message"]) + self.assertEqual(parent.received_events[0]["event"]["kind"], "delivery_acknowledgement") + self.assertEqual(parent.received_events[1]["event"]["kind"], "exception") + self.assertIn("Oh no.", parent.received_events[1]["event"]["exception_message"]) def test_child_sends_heartbeat_messages_at_expected_regular_intervals(self): """Test that children send heartbeat messages at the expected regular intervals.""" @@ -742,11 +737,11 @@ def run_function(*args, **kwargs): parent.wait_for_answer(subscription) - self.assertEqual(parent.received_events[1]["kind"], "heartbeat") - self.assertEqual(parent.received_events[2]["kind"], "heartbeat") + self.assertEqual(parent.received_events[1]["event"]["kind"], "heartbeat") + self.assertEqual(parent.received_events[2]["event"]["kind"], "heartbeat") - first_heartbeat_time = datetime.datetime.fromisoformat(parent.received_events[1]["datetime"]) - second_heartbeat_time = datetime.datetime.fromisoformat(parent.received_events[2]["datetime"]) + first_heartbeat_time = datetime.datetime.fromisoformat(parent.received_events[1]["attributes"]["datetime"]) + second_heartbeat_time = datetime.datetime.fromisoformat(parent.received_events[2]["attributes"]["datetime"]) self.assertAlmostEqual( second_heartbeat_time - first_heartbeat_time, diff --git a/tests/cloud/pub_sub/test_subscription.py b/tests/cloud/pub_sub/test_subscription.py index 649ab21f5..652dd24ab 100644 --- a/tests/cloud/pub_sub/test_subscription.py +++ b/tests/cloud/pub_sub/test_subscription.py @@ -71,6 +71,7 @@ def test_create_pull_subscription(self): self.assertEqual(response._pb.ack_deadline_seconds, 600) self.assertEqual(response._pb.expiration_policy.ttl.seconds, THIRTY_ONE_DAYS) self.assertEqual(response._pb.message_retention_duration.seconds, 600) + self.assertTrue(response._pb.enable_message_ordering) self.assertEqual(response._pb.retry_policy.minimum_backoff.seconds, 10) self.assertEqual(response._pb.retry_policy.maximum_backoff.seconds, 600) @@ -86,6 +87,7 @@ def test_create_push_subscription(self): self.assertEqual(response._pb.ack_deadline_seconds, 600) self.assertEqual(response._pb.expiration_policy.ttl.seconds, THIRTY_ONE_DAYS) self.assertEqual(response._pb.message_retention_duration.seconds, 600) + self.assertTrue(response._pb.enable_message_ordering) self.assertEqual(response._pb.retry_policy.minimum_backoff.seconds, 10) self.assertEqual(response._pb.retry_policy.maximum_backoff.seconds, 600) self.assertEqual(response._pb.push_config.push_endpoint, "https://example.com/endpoint") diff --git a/tests/data/events.json b/tests/data/events.json index b170a47e7..dd3fa62e9 100644 --- a/tests/data/events.json +++ b/tests/data/events.json @@ -1,14 +1,16 @@ [ { "event": { - "datetime": "2024-03-06T15:44:18.156044", "kind": "delivery_acknowledgement" }, "attributes": { "datetime": "2024-04-11T10:46:48.236064", "uuid": "a9de11b1-e88f-43fa-b3a4-40a590c3443f", - "order": "0", + "retry_count": 0, "question_uuid": "d45c7e99-d610-413b-8130-dd6eef46dda6", + "parent_question_uuid": "5776ad74-52a6-46f7-a526-90421d91b8b2", + "originator_question_uuid": "86dc55b2-4282-42bd-92d0-bd4991ae7356", + "parent": "octue/test-service:1.0.0", "originator": "octue/test-service:1.0.0", "sender": "octue/test-service:1.0.0", "sender_type": "CHILD", @@ -45,8 +47,11 @@ "attributes": { "datetime": 
"2024-04-11T10:46:48.236064", "uuid": "a9de11b1-e88f-43fa-b3a4-40a590c3443f", - "order": "1", + "retry_count": 0, "question_uuid": "d45c7e99-d610-413b-8130-dd6eef46dda6", + "parent_question_uuid": "5776ad74-52a6-46f7-a526-90421d91b8b2", + "originator_question_uuid": "86dc55b2-4282-42bd-92d0-bd4991ae7356", + "parent": "octue/test-service:1.0.0", "originator": "octue/test-service:1.0.0", "sender": "octue/test-service:1.0.0", "sender_type": "CHILD", @@ -83,8 +88,11 @@ "attributes": { "datetime": "2024-04-11T10:46:48.236064", "uuid": "a9de11b1-e88f-43fa-b3a4-40a590c3443f", - "order": "2", + "retry_count": 0, "question_uuid": "d45c7e99-d610-413b-8130-dd6eef46dda6", + "parent_question_uuid": "5776ad74-52a6-46f7-a526-90421d91b8b2", + "originator_question_uuid": "86dc55b2-4282-42bd-92d0-bd4991ae7356", + "parent": "octue/test-service:1.0.0", "originator": "octue/test-service:1.0.0", "sender": "octue/test-service:1.0.0", "sender_type": "CHILD", @@ -121,8 +129,11 @@ "attributes": { "datetime": "2024-04-11T10:46:48.236064", "uuid": "a9de11b1-e88f-43fa-b3a4-40a590c3443f", - "order": "3", + "retry_count": 0, "question_uuid": "d45c7e99-d610-413b-8130-dd6eef46dda6", + "parent_question_uuid": "5776ad74-52a6-46f7-a526-90421d91b8b2", + "originator_question_uuid": "86dc55b2-4282-42bd-92d0-bd4991ae7356", + "parent": "octue/test-service:1.0.0", "originator": "octue/test-service:1.0.0", "sender": "octue/test-service:1.0.0", "sender_type": "CHILD", @@ -159,8 +170,11 @@ "attributes": { "datetime": "2024-04-11T10:46:48.236064", "uuid": "a9de11b1-e88f-43fa-b3a4-40a590c3443f", - "order": "4", + "retry_count": 0, "question_uuid": "d45c7e99-d610-413b-8130-dd6eef46dda6", + "parent_question_uuid": "5776ad74-52a6-46f7-a526-90421d91b8b2", + "originator_question_uuid": "86dc55b2-4282-42bd-92d0-bd4991ae7356", + "parent": "octue/test-service:1.0.0", "originator": "octue/test-service:1.0.0", "sender": "octue/test-service:1.0.0", "sender_type": "CHILD", @@ -197,8 +211,11 @@ "attributes": { "datetime": "2024-04-11T10:46:48.236064", "uuid": "a9de11b1-e88f-43fa-b3a4-40a590c3443f", - "order": "5", + "retry_count": 0, "question_uuid": "d45c7e99-d610-413b-8130-dd6eef46dda6", + "parent_question_uuid": "5776ad74-52a6-46f7-a526-90421d91b8b2", + "originator_question_uuid": "86dc55b2-4282-42bd-92d0-bd4991ae7356", + "parent": "octue/test-service:1.0.0", "originator": "octue/test-service:1.0.0", "sender": "octue/test-service:1.0.0", "sender_type": "CHILD", @@ -230,8 +247,11 @@ "attributes": { "datetime": "2024-04-11T10:46:48.236064", "uuid": "a9de11b1-e88f-43fa-b3a4-40a590c3443f", - "order": "6", + "retry_count": 0, "question_uuid": "d45c7e99-d610-413b-8130-dd6eef46dda6", + "parent_question_uuid": "5776ad74-52a6-46f7-a526-90421d91b8b2", + "originator_question_uuid": "86dc55b2-4282-42bd-92d0-bd4991ae7356", + "parent": "octue/test-service:1.0.0", "originator": "octue/test-service:1.0.0", "sender": "octue/test-service:1.0.0", "sender_type": "CHILD", @@ -241,14 +261,15 @@ }, { "event": { - "datetime": "2024-03-06T15:46:18.167424", "kind": "heartbeat" }, "attributes": { "datetime": "2024-04-11T10:46:48.236064", "uuid": "a9de11b1-e88f-43fa-b3a4-40a590c3443f", - "order": "7", "question_uuid": "d45c7e99-d610-413b-8130-dd6eef46dda6", + "parent_question_uuid": "5776ad74-52a6-46f7-a526-90421d91b8b2", + "originator_question_uuid": "86dc55b2-4282-42bd-92d0-bd4991ae7356", + "parent": "octue/test-service:1.0.0", "originator": "octue/test-service:1.0.0", "sender": "octue/test-service:1.0.0", "sender_type": "CHILD", diff --git 
a/tests/cloud/emulators/valid_child_emulator_files/empty_file.json b/tests/data/valid_child_emulator_files/empty_file.json similarity index 100% rename from tests/cloud/emulators/valid_child_emulator_files/empty_file.json rename to tests/data/valid_child_emulator_files/empty_file.json diff --git a/tests/cloud/emulators/valid_child_emulator_files/file_with_exception.json b/tests/data/valid_child_emulator_files/file_with_exception.json similarity index 100% rename from tests/cloud/emulators/valid_child_emulator_files/file_with_exception.json rename to tests/data/valid_child_emulator_files/file_with_exception.json diff --git a/tests/cloud/emulators/valid_child_emulator_files/file_with_only_events.json b/tests/data/valid_child_emulator_files/file_with_only_events.json similarity index 100% rename from tests/cloud/emulators/valid_child_emulator_files/file_with_only_events.json rename to tests/data/valid_child_emulator_files/file_with_only_events.json diff --git a/tests/cloud/emulators/valid_child_emulator_files/file_with_output_manifest.json b/tests/data/valid_child_emulator_files/file_with_output_manifest.json similarity index 100% rename from tests/cloud/emulators/valid_child_emulator_files/file_with_output_manifest.json rename to tests/data/valid_child_emulator_files/file_with_output_manifest.json diff --git a/tests/cloud/emulators/valid_child_emulator_files/full_file.json b/tests/data/valid_child_emulator_files/full_file.json similarity index 89% rename from tests/cloud/emulators/valid_child_emulator_files/full_file.json rename to tests/data/valid_child_emulator_files/full_file.json index c3d0892b9..113f86ac1 100644 --- a/tests/cloud/emulators/valid_child_emulator_files/full_file.json +++ b/tests/data/valid_child_emulator_files/full_file.json @@ -4,7 +4,7 @@ "name": "GCPPubSubBackend", "project_name": "blah" }, - "internal_service_name": "octue/my-service:2.3.0", + "internal_sruid": "octue/my-service:2.3.0", "events": [ { "kind": "log_record", diff --git a/tests/resources/test_child.py b/tests/resources/test_child.py index 4acbea5ef..00dcf1eab 100644 --- a/tests/resources/test_child.py +++ b/tests/resources/test_child.py @@ -1,4 +1,5 @@ import functools +import logging import os import random import threading @@ -36,7 +37,7 @@ class TestChild(BaseTestCase): @classmethod def setUpClass(cls): - """Start the service patcher.. + """Start the service patcher. :return None: """ @@ -44,7 +45,7 @@ def setUpClass(cls): @classmethod def tearDownClass(cls): - """Stop the services patcher. + """Stop the service patcher. :return None: """ @@ -99,6 +100,26 @@ def mock_run_function(analysis_id, input_values, *args, **kwargs): self.assertEqual(child.ask([1, 2, 3, 4])[0]["output_values"], [1, 2, 3, 4]) self.assertEqual(child.ask([5, 6, 7, 8])[0]["output_values"], [5, 6, 7, 8]) + +class TestAskMultiple(BaseTestCase): + service_patcher = ServicePatcher() + + @classmethod + def setUpClass(cls): + """Start the service patcher. + + :return None: + """ + cls.service_patcher.start() + + @classmethod + def tearDownClass(cls): + """Stop the service patcher. 
+ + :return None: + """ + cls.service_patcher.stop() + def test_ask_multiple(self): """Test that a child can be asked multiple questions in parallel and return the answers in the correct order.""" @@ -129,7 +150,7 @@ def mock_run_function(analysis_id, input_values, *args, **kwargs): ], ) - def test_error_raised_in_ask_multiple_if_one_question_fails_when_raise_errors_is_true(self): + def test_error_raised_if_one_question_fails_when_raise_errors_is_true(self): """Test that an error is raised if any of the questions given to `Child.ask_multiple` fail when `raise_errors` is `True`. """ @@ -163,7 +184,7 @@ def mock_run_function_that_sometimes_fails(analysis_id, input_values, *args, **k {"input_values": [9, 10, 11, 12]}, ) - def test_error_not_raised_by_ask_multiple_if_one_question_fails_when_raise_errors_is_false(self): + def test_error_not_raised_if_one_question_fails_when_raise_errors_is_false(self): """Test that an error is not raised if one of the questions given to `Child.ask_multiple` fail when `raise_errors` is `False`. """ @@ -191,16 +212,16 @@ def test_error_not_raised_by_ask_multiple_if_one_question_fails_when_raise_error failed_answers = [] for answer in answers: - if isinstance(answer, Exception): + if isinstance(answer[0], Exception): failed_answers.append(answer) else: successful_answers.append(answer) self.assertEqual(len(successful_answers), 2) self.assertEqual(len(failed_answers), 1) - self.assertIn("Deliberately raised for `Child.ask_multiple` test.", failed_answers[0].args[0]) + self.assertIn("Deliberately raised for `Child.ask_multiple` test.", failed_answers[0][0].args[0]) - def test_ask_multiple_with_failed_question_retry(self): + def test_with_failed_question_retry(self): """Test that failed questions can be automatically retried. We use a lock in the run function so that the questions always succeed/fail in this order (which is the order the questions end up being asked by the thread pool, not necessarily the order they're asked by the caller of `Child.ask_multiple`): @@ -238,7 +259,39 @@ def test_ask_multiple_with_failed_question_retry(self): ], ) - def test_ask_multiple_with_multiple_failed_question_retries(self): + def test_with_failed_question_retry_with_prepared_question_uuid(self): + """Test that questions with prepared question UUIDs can be retried.""" + responding_service = MockService( + backend=GCPPubSubBackend(project_name="blah"), + run_function=functools.partial(mock_run_function_that_fails_every_other_time, runs=Value("d", 0)), + ) + + with patch("octue.resources.child.BACKEND_TO_SERVICE_MAPPING", {"GCPPubSubBackend": MockService}): + responding_service.serve() + + child = Child(id=responding_service.id, backend={"name": "GCPPubSubBackend", "project_name": "blah"}) + + # Make sure the child's underlying mock service knows how to access the mock responding service. + child._service.children[responding_service.id] = responding_service + + # Only ask two questions so the question success/failure order plays out as desired. + answers = child.ask_multiple( + {"input_values": [1, 2, 3, 4], "question_uuid": "b2af87f1-a893-415f-85d8-b5c4385c18f6"}, + {"input_values": [5, 6, 7, 8], "question_uuid": "77763a5e-352c-48c0-8c64-bd09a82eb869"}, + raise_errors=False, + max_retries=1, + ) + + # Check that both questions succeeded. 
+ self.assertEqual( + [answer[0] for answer in answers], + [ + {"output_manifest": None, "output_values": [1, 2, 3, 4]}, + {"output_manifest": None, "output_values": [5, 6, 7, 8]}, + ], + ) + + def test_with_multiple_failed_question_retries(self): """Test that repeatedly failed questions can be automatically retried more than once. We use a lock in the run function so that the questions always succeed/fail in this order (which is the order the questions end up being asked by the thread pool, not necessarily the order they're asked by the caller of `Child.ask_multiple`): @@ -284,7 +337,38 @@ def test_ask_multiple_with_multiple_failed_question_retries(self): ], ) - def test_ask_multiple_with_prevented_retries(self): + def test_errors_logged_when_not_raised(self): + """Test that errors from any questions still failing after retries are exhausted are logged.""" + responding_service = MockService( + backend=GCPPubSubBackend(project_name="blah"), + run_function=functools.partial(mock_run_function_that_fails_every_other_time, runs=Value("d", 0)), + ) + + with patch("octue.resources.child.BACKEND_TO_SERVICE_MAPPING", {"GCPPubSubBackend": MockService}): + responding_service.serve() + + child = Child(id=responding_service.id, backend={"name": "GCPPubSubBackend", "project_name": "blah"}) + + # Make sure the child's underlying mock service knows how to access the mock responding service. + child._service.children[responding_service.id] = responding_service + + # Only ask two questions so the question success/failure order plays out as desired. + with self.assertLogs(level=logging.ERROR) as logging_context: + child.ask_multiple( + {"input_values": [1, 2, 3, 4]}, + {"input_values": [5, 6, 7, 8]}, + raise_errors=False, + max_retries=0, + ) + + self.assertIn("failed after 0 retries (see below for error).", logging_context.output[2]) + + self.assertIn( + 'raise ValueError("Deliberately raised for `Child.ask_multiple` test.")', + logging_context.output[2], + ) + + def test_with_prevented_retries(self): """Test that retries can be prevented for specified exception types.""" responding_service = MockService( backend=GCPPubSubBackend(project_name="blah"), @@ -312,11 +396,11 @@ def test_ask_multiple_with_prevented_retries(self): failed_answers = [] for answer in answers: - if isinstance(answer, Exception): + if isinstance(answer[0], Exception): failed_answers.append(answer) else: successful_answers.append(answer) self.assertEqual(len(successful_answers), 1) self.assertEqual(len(failed_answers), 1) - self.assertIn("Deliberately raised for `Child.ask_multiple` test.", failed_answers[0].args[0]) + self.assertIn("Deliberately raised for `Child.ask_multiple` test.", failed_answers[0][0].args[0]) diff --git a/tests/resources/test_dataset.py b/tests/resources/test_dataset.py index 2d5912603..ff9a4da37 100644 --- a/tests/resources/test_dataset.py +++ b/tests/resources/test_dataset.py @@ -1,5 +1,6 @@ import copy import json +import logging import os import tempfile from unittest.mock import patch @@ -31,6 +32,14 @@ def test_len(self): dataset = self.create_valid_dataset() self.assertEqual(len(dataset), len(dataset.files)) + def test_empty_dataset_logs_warning(self): + """Test that datasets that are empty at instantiation time log a warning.""" + with tempfile.TemporaryDirectory() as temporary_directory: + with self.assertLogs(level=logging.WARNING) as logging_context: + Dataset(temporary_directory) + + self.assertIn(f"is empty at instantiation time (path {temporary_directory!r}).", logging_context.output[0]) + def 
test_iter(self): """Test that iterating over a Dataset is equivalent to iterating over its files.""" dataset = self.create_valid_dataset() @@ -494,7 +503,7 @@ def test_from_cloud_with_nested_dataset_and_no_metadata_file(self): """Test that a nested dataset is loaded from the cloud correctly if it has no `.octue` metadata file in it.""" dataset_path = self.create_nested_cloud_dataset(dataset_name="nested_dataset_with_no_metadata") - cloud_dataset = Dataset(path=dataset_path, recursive=True) + cloud_dataset = Dataset(path=dataset_path) self.assertEqual(cloud_dataset.path, dataset_path) self.assertEqual(cloud_dataset.name, "nested_dataset_with_no_metadata") @@ -513,7 +522,6 @@ def test_update_local_metadata(self): dataset = Dataset( path=temporary_directory, - recursive=True, id="69253db4-7972-42de-8ccc-61336a28cd50", tags={"cat": "dog"}, labels=["animals"], @@ -521,7 +529,7 @@ def test_update_local_metadata(self): dataset.update_local_metadata() - dataset_reloaded = Dataset(path=temporary_directory, recursive=True) + dataset_reloaded = Dataset(path=temporary_directory) self.assertEqual(dataset.id, dataset_reloaded.id) self.assertEqual(dataset.tags, dataset_reloaded.tags) self.assertEqual(dataset.labels, dataset_reloaded.labels) @@ -573,7 +581,7 @@ def test_upload_with_nested_dataset_preserves_nested_structure(self): """ with tempfile.TemporaryDirectory() as temporary_directory: local_paths = self._create_files_and_nested_subdirectories(temporary_directory) - dataset = Dataset(path=temporary_directory, recursive=True) + dataset = Dataset(path=temporary_directory) upload_path = storage.path.generate_gs_path(TEST_BUCKET_NAME, "my-dataset") dataset.upload(cloud_path=upload_path) @@ -598,13 +606,13 @@ def test_upload_works_with_implicit_cloud_location_if_cloud_location_previously_ provided. """ dataset_path = self.create_nested_cloud_dataset() - dataset = Dataset(path=dataset_path, recursive=True) + dataset = Dataset(path=dataset_path) dataset.upload() def test_upload_to_new_location(self): """Test that a dataset can be uploaded to a new cloud location.""" dataset_path = self.create_nested_cloud_dataset() - dataset = Dataset(dataset_path) + dataset = Dataset(dataset_path, recursive=False) new_cloud_path = storage.path.generate_gs_path(TEST_BUCKET_NAME, "new", "dataset", "location") dataset.upload(new_cloud_path) @@ -652,7 +660,7 @@ def test_download_from_nested_dataset(self): """Test that all files in a nested dataset can be downloaded with one command.""" dataset_path = self.create_nested_cloud_dataset() - dataset = Dataset(path=dataset_path, recursive=True) + dataset = Dataset(path=dataset_path) with tempfile.TemporaryDirectory() as temporary_directory: dataset.download(local_directory=temporary_directory) @@ -675,12 +683,12 @@ def test_download_from_nested_dataset_with_no_local_directory_given(self): """ dataset_path = self.create_nested_cloud_dataset() - dataset = Dataset(path=dataset_path, recursive=True) + dataset = Dataset(path=dataset_path) # Mock the temporary directory created in `Dataset.download_all_files` so we can access it for the test. 
temporary_directory = tempfile.TemporaryDirectory() - with patch("tempfile.TemporaryDirectory", return_value=temporary_directory): + with patch("octue.resources.dataset.RegisteredTemporaryDirectory", return_value=temporary_directory): dataset.download() with open(os.path.join(temporary_directory.name, "file_0.txt")) as f: @@ -714,7 +722,7 @@ def test_from_local_directory_recursively(self): """Test that a dataset can be instantiated from a local nested directory including its subdirectories.""" with tempfile.TemporaryDirectory() as temporary_directory: paths = self._create_files_and_nested_subdirectories(temporary_directory) - dataset = Dataset(path=temporary_directory, recursive=True) + dataset = Dataset(path=temporary_directory) # Check that all the files from the directory are present in the dataset. datafile_paths = {datafile.local_path for datafile in dataset.files} @@ -765,13 +773,13 @@ def test_exiting_context_manager_of_local_dataset_updates_local_metadata(self): with tempfile.TemporaryDirectory() as temporary_directory: self._create_files_and_nested_subdirectories(temporary_directory) - dataset = Dataset(path=temporary_directory, recursive=True) + dataset = Dataset(path=temporary_directory) with dataset: dataset.tags = {"cat": "dog"} dataset.labels = {"animals"} - reloaded_dataset = Dataset(path=temporary_directory, recursive=True) + reloaded_dataset = Dataset(path=temporary_directory) self.assertEqual(reloaded_dataset.id, dataset.id) self.assertEqual(reloaded_dataset.tags, {"cat": "dog"}) self.assertEqual(reloaded_dataset.labels, {"animals"}) @@ -779,7 +787,7 @@ def test_exiting_context_manager_of_local_dataset_updates_local_metadata(self): def test_exiting_context_manager_of_cloud_dataset_updates_cloud_metadata(self): """Test that cloud metadata for a cloud dataset is updated on exit of the dataset context manager.""" dataset_path = self.create_nested_cloud_dataset() - dataset = Dataset(path=dataset_path, recursive=True) + dataset = Dataset(path=dataset_path) with dataset: dataset.tags = {"cat": "dog"} diff --git a/tests/templates/test_template_apps.py b/tests/templates/test_template_apps.py index d5b784268..83006ec21 100644 --- a/tests/templates/test_template_apps.py +++ b/tests/templates/test_template_apps.py @@ -120,7 +120,7 @@ def test_child_services_template(self): app_src=parent_service_path, twine=os.path.join(parent_service_path, "twine.json"), children=children, - service_id="template-child-services/parent-service", + service_id="template-child-services/parent-service:local", ) analysis = runner.run(input_values=os.path.join(parent_service_path, "data", "input", "values.json")) @@ -146,7 +146,7 @@ def test_child_services_template_using_emulated_children(self): emulated_children = [ ChildEmulator( id=f"template-child-services/wind-speed-service:{MOCK_SERVICE_REVISION_TAG}", - internal_service_name=runner.service_id, + internal_sruid=runner.service_id, events=[ {"kind": "log_record", "log_record": {"msg": "This is an emulated child log message."}}, {"kind": "result", "output_values": [10], "output_manifest": None}, @@ -154,7 +154,7 @@ def test_child_services_template_using_emulated_children(self): ), ChildEmulator( id=f"template-child-services/elevation-service:{MOCK_SERVICE_REVISION_TAG}", - internal_service_name=runner.service_id, + internal_sruid=runner.service_id, events=[ {"kind": "result", "output_values": [300], "output_manifest": None}, ], diff --git a/tests/test_cli.py b/tests/test_cli.py index 97c02da0c..20a92e0a5 100644 --- a/tests/test_cli.py +++ 
diff --git a/tests/templates/test_template_apps.py b/tests/templates/test_template_apps.py
index d5b784268..83006ec21 100644
--- a/tests/templates/test_template_apps.py
+++ b/tests/templates/test_template_apps.py
@@ -120,7 +120,7 @@ def test_child_services_template(self):
             app_src=parent_service_path,
             twine=os.path.join(parent_service_path, "twine.json"),
             children=children,
-            service_id="template-child-services/parent-service",
+            service_id="template-child-services/parent-service:local",
         )

         analysis = runner.run(input_values=os.path.join(parent_service_path, "data", "input", "values.json"))
@@ -146,7 +146,7 @@ def test_child_services_template_using_emulated_children(self):
         emulated_children = [
             ChildEmulator(
                 id=f"template-child-services/wind-speed-service:{MOCK_SERVICE_REVISION_TAG}",
-                internal_service_name=runner.service_id,
+                internal_sruid=runner.service_id,
                 events=[
                     {"kind": "log_record", "log_record": {"msg": "This is an emulated child log message."}},
                     {"kind": "result", "output_values": [10], "output_manifest": None},
@@ -154,7 +154,7 @@
             ),
             ChildEmulator(
                 id=f"template-child-services/elevation-service:{MOCK_SERVICE_REVISION_TAG}",
-                internal_service_name=runner.service_id,
+                internal_sruid=runner.service_id,
                 events=[
                     {"kind": "result", "output_values": [300], "output_manifest": None},
                 ],
diff --git a/tests/test_cli.py b/tests/test_cli.py
index 97c02da0c..20a92e0a5 100644
--- a/tests/test_cli.py
+++ b/tests/test_cli.py
@@ -9,8 +9,10 @@
 from octue.cli import octue_cli
 from octue.cloud import storage
-from octue.cloud.emulators._pub_sub import MockService, MockTopic
+from octue.cloud.emulators._pub_sub import MockService, MockSubscription, MockTopic
 from octue.cloud.emulators.child import ServicePatcher
+from octue.cloud.events import OCTUE_SERVICES_PREFIX
+from octue.cloud.pub_sub import Topic
 from octue.configuration import AppConfiguration, ServiceConfiguration
 from octue.resources import Dataset
 from octue.utils.patches import MultiPatcher
@@ -266,6 +268,33 @@ def setUpClass(cls):
             diagnostics.upload(storage.path.join(cls.DIAGNOSTICS_CLOUD_PATH, cls.ANALYSIS_ID))

+    def test_warning_logged_if_no_diagnostics_found(self):
+        """Test that a warning about there being no diagnostics is logged if the diagnostics cloud path is empty."""
+        with tempfile.TemporaryDirectory() as temporary_directory:
+            result = CliRunner().invoke(
+                octue_cli,
+                [
+                    "get-diagnostics",
+                    storage.path.join(self.DIAGNOSTICS_CLOUD_PATH, "9f4ccee3-15b0-4a03-b5ac-c19e1d66a709"),
+                    "--local-path",
+                    temporary_directory,
+                ],
+            )
+
+        self.assertIn(
+            "Attempted to download files from 'gs://octue-sdk-python-test-bucket/diagnostics/9f4ccee3-15b0-4a03-b5ac-"
+            "c19e1d66a709' but it appears empty. Please check this is the correct path.",
+            result.output,
+        )
+
+        self.assertIn(
+            "No diagnostics found at 'gs://octue-sdk-python-test-bucket/diagnostics/9f4ccee3-15b0-4a03-b5ac-"
+            "c19e1d66a709'",
+            result.output,
+        )
+
+        self.assertNotIn("Downloaded diagnostics from", result.output)
+
     def test_get_diagnostics(self):
         """Test that only the values files, manifests, and questions file are downloaded when using the
         `get-diagnostics` CLI command.
@@ -423,3 +452,36 @@ def test_create_push_subscription(self):
         self.assertEqual(subscription.call_args.kwargs["name"], "octue.example-service.3-5-0")
         self.assertEqual(subscription.call_args.kwargs["push_endpoint"], "https://example.com/endpoint")
         self.assertEqual(subscription.call_args.kwargs["expiration_time"], expected_expiration_time)
+        self.assertEqual(result.output, "Subscription for 'octue/example-service:3.5.0' created.\n")
+
+    def test_create_push_subscription_when_already_exists(self):
+        """Test attempting to create a push subscription for a service revision when one already exists for it."""
+        sruid = "octue.example-service.3-5-0"
+        push_endpoint = "https://example.com/endpoint"
+
+        with patch("octue.cloud.pub_sub.Topic", new=MockTopic):
+            with patch("octue.cloud.pub_sub.Subscription", new=MockSubscription):
+                subscription = MockSubscription(
+                    name=sruid,
+                    topic=Topic(name=OCTUE_SERVICES_PREFIX, project_name="my-project"),
+                    push_endpoint=push_endpoint,
+                )
+
+                subscription.create()
+
+                result = CliRunner().invoke(
+                    octue_cli,
+                    [
+                        "deploy",
+                        "create-push-subscription",
+                        "my-project",
+                        "octue",
+                        "example-service",
+                        push_endpoint,
+                        "--revision-tag=3.5.0",
+                    ],
+                )
+
+        self.assertIsNone(result.exception)
+        self.assertEqual(result.exit_code, 0)
+        self.assertEqual(result.output, "Subscription for 'octue/example-service:3.5.0' already exists.\n")
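The new `test_create_push_subscription_when_already_exists` pins down idempotent behaviour: invoking `deploy create-push-subscription` for a service revision that already has a subscription should exit cleanly and report the fact rather than fail. A rough, self-contained sketch of that contract; the registry and helper below are invented for illustration and are not octue API:

    # Hypothetical in-memory registry standing in for Pub/Sub.
    existing_subscriptions = {"octue.example-service.3-5-0"}

    def create_push_subscription(name: str, sruid: str, push_endpoint: str) -> str:
        """Create a push subscription unless one already exists, mirroring the
        CLI output messages asserted in the tests above (sketch only).
        """
        if name in existing_subscriptions:
            return f"Subscription for '{sruid}' already exists.\n"

        existing_subscriptions.add(name)
        return f"Subscription for '{sruid}' created.\n"

    # Prints the "already exists" message because the name is registered above.
    print(create_push_subscription("octue.example-service.3-5-0", "octue/example-service:3.5.0", "https://example.com/endpoint"))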
self.assertNotIn("[analysis-hello-moto]", analysis_formatter._fmt) + self.assertNotIn("[hello-moto]", analysis_formatter._fmt) self.assertIs(root_logger.handlers[0].formatter, initial_formatter) @@ -146,7 +146,7 @@ def test_octue_formatter_applied_to_existing_handler(self): """ root_logger = logging.getLogger() initial_handler = root_logger.handlers[0] - self.assertNotIn("[analysis-hello-moto]", initial_handler.formatter._fmt) + self.assertNotIn("[hello-moto]", initial_handler.formatter._fmt) analysis_log_handler_switcher = AnalysisLogFormatterSwitcher( analysis_id="hello-moto", @@ -155,7 +155,7 @@ def test_octue_formatter_applied_to_existing_handler(self): ) with analysis_log_handler_switcher: - self.assertIn("[analysis-hello-moto]", initial_handler.formatter._fmt) + self.assertIn("[hello-moto]", initial_handler.formatter._fmt) self.assertIs(root_logger.handlers[0], initial_handler) @@ -219,7 +219,7 @@ def test_log_messages_handled_via_root_logger_are_capturable(self): with analysis_log_handler_switcher: root_logger.info("Log message to be captured.") - self.assertIn("[analysis-hello-moto]", logging_context.output[0]) + self.assertIn("[hello-moto]", logging_context.output[0]) self.assertEqual(logging_context.records[0].message, "Log message to be captured.") def test_submodule_logs_are_handled_and_capturable(self): @@ -239,10 +239,10 @@ def test_submodule_logs_are_handled_and_capturable(self): with analysis_log_handler_switcher: app_using_submodule(None) - self.assertIn("[analysis-hello-moto]", logging_context.output[0]) + self.assertIn("[hello-moto]", logging_context.output[0]) self.assertEqual(logging_context.records[0].name, "tests.test_app_modules.app_using_submodule.app") self.assertEqual(logging_context.records[0].message, "Log message from app.") - self.assertIn("[analysis-hello-moto]", logging_context.output[1]) + self.assertIn("[hello-moto]", logging_context.output[1]) self.assertEqual(logging_context.records[1].name, "tests.test_app_modules.app_using_submodule.submodule") self.assertEqual(logging_context.records[1].message, "Log message from submodule.") diff --git a/tests/test_runner.py b/tests/test_runner.py index db3c322bd..173bf25c8 100644 --- a/tests/test_runner.py +++ b/tests/test_runner.py @@ -16,6 +16,7 @@ from octue.cloud.storage import GoogleCloudStorageClient from octue.resources import Dataset, Manifest from octue.resources.datafile import Datafile +from octue.utils.files import RegisteredTemporaryDirectory, registered_temporary_directories from tests import MOCK_SERVICE_REVISION_TAG, TEST_BUCKET_NAME, TESTS_DIR from tests.base import BaseTestCase from tests.test_app_modules.app_class.app import App @@ -266,11 +267,11 @@ def test_app_submodule_logs_are_handled(self): with self.assertLogs(level=logging.INFO) as logging_context: runner.run(analysis_id=analysis_id) - self.assertIn(f"[analysis-{analysis_id}]", logging_context.output[0]) + self.assertIn(analysis_id, logging_context.output[0]) self.assertEqual(logging_context.records[0].name, "app") self.assertEqual(logging_context.records[0].message, "Log message from app.") - self.assertIn(f"[analysis-{analysis_id}]", logging_context.output[1]) + self.assertIn(analysis_id, logging_context.output[1]) self.assertEqual(logging_context.records[1].name, "tests.test_app_modules.app_using_submodule.submodule") self.assertEqual(logging_context.records[1].message, "Log message from submodule.") @@ -388,10 +389,10 @@ def app(analysis): self.assertEqual(questions[1]["id"], f"octue/yet-another-child:{MOCK_SERVICE_REVISION_TAG}") 
diff --git a/tests/test_runner.py b/tests/test_runner.py
index db3c322bd..173bf25c8 100644
--- a/tests/test_runner.py
+++ b/tests/test_runner.py
@@ -16,6 +16,7 @@
 from octue.cloud.storage import GoogleCloudStorageClient
 from octue.resources import Dataset, Manifest
 from octue.resources.datafile import Datafile
+from octue.utils.files import RegisteredTemporaryDirectory, registered_temporary_directories
 from tests import MOCK_SERVICE_REVISION_TAG, TEST_BUCKET_NAME, TESTS_DIR
 from tests.base import BaseTestCase
 from tests.test_app_modules.app_class.app import App
@@ -266,11 +267,11 @@ def test_app_submodule_logs_are_handled(self):
         with self.assertLogs(level=logging.INFO) as logging_context:
             runner.run(analysis_id=analysis_id)

-        self.assertIn(f"[analysis-{analysis_id}]", logging_context.output[0])
+        self.assertIn(analysis_id, logging_context.output[0])
         self.assertEqual(logging_context.records[0].name, "app")
         self.assertEqual(logging_context.records[0].message, "Log message from app.")

-        self.assertIn(f"[analysis-{analysis_id}]", logging_context.output[1])
+        self.assertIn(analysis_id, logging_context.output[1])
         self.assertEqual(logging_context.records[1].name, "tests.test_app_modules.app_using_submodule.submodule")
         self.assertEqual(logging_context.records[1].message, "Log message from submodule.")
@@ -388,10 +389,10 @@ def app(analysis):
         self.assertEqual(questions[1]["id"], f"octue/yet-another-child:{MOCK_SERVICE_REVISION_TAG}")
         self.assertEqual(questions[1]["input_values"], "miaow")
-        self.assertEqual(questions[1]["events"][1]["kind"], "exception")
-        self.assertEqual(questions[1]["events"][1]["exception_type"], "ValueError")
+        self.assertEqual(questions[1]["events"][1]["event"]["kind"], "exception")
+        self.assertEqual(questions[1]["events"][1]["event"]["exception_type"], "ValueError")
         self.assertEqual(
-            questions[1]["events"][1]["exception_message"],
+            questions[1]["events"][1]["event"]["exception_message"],
             f"Error in : Deliberately raised for "
             f"testing.",
         )
@@ -440,8 +441,10 @@ def test_valid_output_location(self):
         """Test that a valid cloud path passes output location validation."""
         Runner(".", twine="{}", output_location="gs://my-bucket/blah")

-    def test_downloaded_datafiles_are_deleted_when_runner_finishes(self):
-        """Test that datafiles downloaded during an analysis are deleted when the runner finishes."""
+    def test_downloaded_datafiles_and_registered_temporary_directories_are_deleted_when_runner_finishes(self):
+        """Test that datafiles downloaded and registered temporary directories created during an analysis are deleted
+        when the runner finishes.
+        """
         twine = {
             "output_values_schema": {
                 "type": "object",
@@ -455,11 +458,20 @@ def app_that_downloads_datafile(analysis):
             datafile = Datafile(cloud_path)
+
+            # Download a datafile locally.
             datafile.download()
+
+            # Create a temporary directory.
+            temporary_directory = RegisteredTemporaryDirectory()
+            self.assertTrue(os.path.exists(temporary_directory.name))
+
             analysis.output_values = {"downloaded_file_path": datafile.local_path}

-        analysis = Runner(app_src=app_that_downloads_datafile, twine=twine).run()
+        analysis = Runner(app_src=app_that_downloads_datafile, twine=twine, delete_local_files=True).run()
+
         self.assertFalse(os.path.exists(analysis.output_values["downloaded_file_path"]))
+        self.assertFalse(os.path.exists(registered_temporary_directories[0].name))


 class TestRunnerWithRequiredDatasetFileTags(BaseTestCase):
@@ -851,14 +863,14 @@ def app(analysis):
         self.assertEqual(questions[0]["key"], "my-child")
         self.assertEqual(questions[0]["id"], f"octue/a-child:{MOCK_SERVICE_REVISION_TAG}")
         self.assertEqual(questions[0]["input_values"], [1, 2, 3, 4])
-        self.assertEqual(questions[0]["events"][1]["output_values"], [1, 4, 9, 16])
+        self.assertEqual(questions[0]["events"][1]["event"]["output_values"], [1, 4, 9, 16])
         self.assertEqual(len(questions[0]["events"]), 2)

         # Second question.
         self.assertEqual(questions[1]["key"], "another-child")
         self.assertEqual(questions[1]["id"], f"octue/another-child:{MOCK_SERVICE_REVISION_TAG}")
         self.assertEqual(questions[1]["input_values"], "miaow")
-        self.assertEqual(questions[1]["events"][1]["output_values"], "woof")
+        self.assertEqual(questions[1]["events"][1]["event"]["output_values"], "woof")

         # This should be 4 but log messages aren't currently being handled by the child emulator correctly.
         self.assertEqual(len(questions[1]["events"]), 2)
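The reworked `test_runner.py` assertions reflect a change in the shape of stored question events: each entry in `questions[n]["events"]` now nests its payload under an `event` key instead of holding the fields at the top level. A toy example of reading the new structure, with made-up values:

    # Made-up record showing the nesting the updated assertions expect.
    question = {
        "id": "octue/a-child:0.1.0",
        "events": [
            {"event": {"kind": "delivery_acknowledgement"}},
            {"event": {"kind": "result", "output_values": [1, 4, 9, 16]}},
        ],
    }

    # Payload fields are now read through the "event" key rather than the top level.
    result_event = question["events"][1]["event"]
    assert result_event["kind"] == "result"
    assert result_event["output_values"] == [1, 4, 9, 16]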
diff --git a/tests/utils/test_exceptions.py b/tests/utils/test_exceptions.py
new file mode 100644
index 000000000..91a0f34bf
--- /dev/null
+++ b/tests/utils/test_exceptions.py
@@ -0,0 +1,29 @@
+import unittest
+
+from octue.utils.exceptions import convert_exception_to_primitives
+
+
+class TestExceptions(unittest.TestCase):
+    def test_with_caught_exception(self):
+        """Test converting a caught exception (without passing the exception directly)."""
+        try:
+            raise ValueError("Deliberately raised for test.")
+        except Exception:
+            converted_exception = convert_exception_to_primitives()
+
+        self.assertEqual(converted_exception["type"], "ValueError")
+        self.assertEqual(converted_exception["message"], "Deliberately raised for test.")
+
+        self.assertIn(
+            'in test_with_caught_exception\n raise ValueError("Deliberately raised for test.")',
+            converted_exception["traceback"][0],
+        )
+
+    def test_with_passed_unraised_exception(self):
+        """Test converting an unraised exception passed in to the function."""
+        exception = ValueError("Deliberately raised for test.")
+        converted_exception = convert_exception_to_primitives(exception)
+
+        self.assertEqual(converted_exception["type"], "ValueError")
+        self.assertEqual(converted_exception["message"], "Deliberately raised for test.")
+        self.assertIn("in convert_exception_to_primitives\n raise exception", converted_exception["traceback"][0])
diff --git a/tests/utils/test_files.py b/tests/utils/test_files.py
new file mode 100644
index 000000000..bea6fec7a
--- /dev/null
+++ b/tests/utils/test_files.py
@@ -0,0 +1,10 @@
+import unittest
+
+from octue.utils.files import RegisteredTemporaryDirectory, registered_temporary_directories
+
+
+class TestRegisteredTemporaryDirectory(unittest.TestCase):
+    def test_is_registered(self):
+        """Test that the directory is registered in the `temporary_directories` list."""
+        file = RegisteredTemporaryDirectory()
+        self.assertIn(file, registered_temporary_directories)
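Judging by these tests, `RegisteredTemporaryDirectory` behaves like `tempfile.TemporaryDirectory` but records each instance in the module-level `registered_temporary_directories` list so the runner can delete them when `delete_local_files=True`. A minimal sketch of how such a class could work, inferred from the tests rather than taken from the actual implementation:

    import tempfile

    registered_temporary_directories = []

    class RegisteredTemporaryDirectory(tempfile.TemporaryDirectory):
        """A temporary directory that registers itself on creation so a runner
        can find and remove it once an analysis has finished (sketch only).
        """

        def __init__(self, *args, **kwargs):
            super().__init__(*args, **kwargs)
            registered_temporary_directories.append(self)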
""" - with tempfile.NamedTemporaryFile(delete=False) as temporary_file: - with open(temporary_file.name, "w") as f: + with tempfile.TemporaryDirectory() as temporary_directory: + datafile = Datafile(os.path.join(temporary_directory, "datafile.dat")) + + with open(datafile.metadata_path, "w") as f: f.write("some gobbledeegook") with self.assertLogs(level=logging.WARNING) as logging_context: - local_metadata = load_local_metadata_file(temporary_file.name) + local_metadata = load_local_metadata_file(datafile) self.assertEqual(local_metadata, {}) @@ -30,23 +33,25 @@ def test_warning_raised_and_empty_dictionary_returned_if_local_metadata_file_cor def test_empty_dictionary_returned_if_local_metadata_file_does_not_exist(self): """Test that an empty dictionary is returned if trying to load a local metadata file that doesn't exist.""" with tempfile.TemporaryDirectory() as temporary_directory: - self.assertEqual(load_local_metadata_file(path=os.path.join(temporary_directory, ".octue")), {}) + self.assertEqual(load_local_metadata_file(Dataset(path=temporary_directory)), {}) def test_local_metadata_is_cached_once_loaded_in_python_session(self): """Test that, if a local metadata file has been loaded once during the python session, it is loaded from the cache instead of from disk for the rest of the session. """ - with tempfile.NamedTemporaryFile(delete=False) as temporary_file: - with open(temporary_file.name, "w") as f: + with tempfile.TemporaryDirectory() as temporary_directory: + datafile = Datafile(os.path.join(temporary_directory, "datafile.dat")) + + with open(datafile.metadata_path, "w") as f: json.dump({"some": "data"}, f) # Load the metadata file once and check its contents have been cached. - load_local_metadata_file(temporary_file.name) - self.assertEqual(cached_local_metadata_files[temporary_file.name], {"some": "data"}) + load_local_metadata_file(datafile) + self.assertEqual(cached_local_metadata_files[datafile.id], {"some": "data"}) # Check that it's not loaded from disk again. with patch("builtins.open") as mock_open: - local_metadata = load_local_metadata_file(temporary_file.name) + local_metadata = load_local_metadata_file(datafile) mock_open.assert_not_called() self.assertEqual(local_metadata, {"some": "data"}) @@ -55,18 +60,20 @@ def test_local_metadata_is_cached_if_already_written_to_in_python_session(self): """Test that, if a local metadata file has been written to during the python session, it is loaded from the cache instead of from disk for the rest of the session. """ - with tempfile.NamedTemporaryFile(delete=False) as temporary_file: + with tempfile.TemporaryDirectory() as temporary_directory: + datafile = Datafile(os.path.join(temporary_directory, "datafile.dat")) + # Write the metadata file and check its contents have been cached. - overwrite_local_metadata_file(data={"some": "data"}, path=temporary_file.name) - self.assertEqual(cached_local_metadata_files[temporary_file.name], {"some": "data"}) + overwrite_local_metadata_file(data={"some": "data"}, datafile_or_dataset=datafile) + self.assertEqual(cached_local_metadata_files[datafile.id], {"some": "data"}) # Check the file has been written correctly. - with open(temporary_file.name) as f: + with open(datafile.metadata_path) as f: self.assertEqual(json.load(f), {"some": "data"}) # Check that it's not loaded from disk again. 
with patch("builtins.open") as mock_open: - local_metadata = load_local_metadata_file(temporary_file.name) + local_metadata = load_local_metadata_file(datafile) mock_open.assert_not_called() self.assertEqual(local_metadata, {"some": "data"}) @@ -75,18 +82,20 @@ def test_cache_not_busted_if_overwriting_with_same_data(self): """Test that the cache is not busted and the local metadata file is not rewritten if trying to overwrite it with the same data as is in the cache. """ - with tempfile.NamedTemporaryFile(delete=False) as temporary_file: - with open(temporary_file.name, "w") as f: + with tempfile.TemporaryDirectory() as temporary_directory: + datafile = Datafile(os.path.join(temporary_directory, "datafile.dat")) + + with open(datafile.metadata_path, "w") as f: json.dump({"some": "data"}, f) # Load the metadata file once and check its contents have been cached. - load_local_metadata_file(temporary_file.name) - self.assertEqual(cached_local_metadata_files[temporary_file.name], {"some": "data"}) + load_local_metadata_file(datafile) + self.assertEqual(cached_local_metadata_files[datafile.id], {"some": "data"}) # Overwrite the metadata file with the same data. with self.assertLogs(level=logging.DEBUG) as logging_context: with patch("builtins.open") as mock_open: - overwrite_local_metadata_file({"some": "data"}, path=temporary_file.name) + overwrite_local_metadata_file({"some": "data"}, datafile) mock_open.assert_not_called() @@ -99,19 +108,21 @@ def test_cache_busted_if_overwriting_with_new_data(self): """Test that the cache is busted and the local metadata file is rewritten if trying to overwrite it with data different from what's in the cache. """ - with tempfile.NamedTemporaryFile(delete=False) as temporary_file: - with open(temporary_file.name, "w") as f: + with tempfile.TemporaryDirectory() as temporary_directory: + datafile = Datafile(os.path.join(temporary_directory, "datafile.dat")) + + with open(datafile.metadata_path, "w") as f: json.dump({"some": "data"}, f) # Load the metadata file once and check its contents have been cached. - load_local_metadata_file(temporary_file.name) - self.assertEqual(cached_local_metadata_files[temporary_file.name], {"some": "data"}) + load_local_metadata_file(datafile) + self.assertEqual(cached_local_metadata_files[datafile.id], {"some": "data"}) - overwrite_local_metadata_file({"new": "information"}, path=temporary_file.name) + overwrite_local_metadata_file({"new": "information"}, datafile) # Check the metadata file has been overwritten. - with open(temporary_file.name) as f: + with open(datafile.metadata_path) as f: self.assertEqual(json.load(f), {"new": "information"}) # Check the cache entry has been updated. - self.assertEqual(cached_local_metadata_files[temporary_file.name], {"new": "information"}) + self.assertEqual(cached_local_metadata_files[datafile.id], {"new": "information"})