diff --git a/.gitignore b/.gitignore
index b05352d..b0fb9db 100644
--- a/.gitignore
+++ b/.gitignore
@@ -31,7 +31,7 @@ pip-log.txt
 .tox
 coverage.xml
 htmlcov/
-
+/pii_report/
 
 # The Silver Searcher
@@ -59,3 +59,6 @@ docs/edx_event_bus_kafka.*.rst
 # Private requirements
 requirements/private.in
 requirements/private.txt
+
+# IDE
+.idea
diff --git a/Makefile b/Makefile
index 39f3ef2..79e9cc0 100644
--- a/Makefile
+++ b/Makefile
@@ -30,13 +30,16 @@ docs: ## generate Sphinx HTML documentation, including API docs
 	$(BROWSER)docs/_build/html/index.html
 
 # Define PIP_COMPILE_OPTS=-v to get more information during make upgrade.
-PIP_COMPILE = pip-compile --rebuild --upgrade $(PIP_COMPILE_OPTS)
+PIP_COMPILE = pip-compile --upgrade $(PIP_COMPILE_OPTS)
 
 upgrade: export CUSTOM_COMPILE_COMMAND=make upgrade
 upgrade: ## update the requirements/*.txt files with the latest packages satisfying requirements/*.in
 	pip install -qr requirements/pip-tools.txt
 	# Make sure to compile files after any other files they include!
+	$(PIP_COMPILE) --allow-unsafe -o requirements/pip.txt requirements/pip.in
 	$(PIP_COMPILE) -o requirements/pip-tools.txt requirements/pip-tools.in
+	pip install -qr requirements/pip.txt
+	pip install -qr requirements/pip-tools.txt
 	$(PIP_COMPILE) -o requirements/base.txt requirements/base.in
 	$(PIP_COMPILE) -o requirements/test.txt requirements/test.in
 	$(PIP_COMPILE) -o requirements/doc.txt requirements/doc.in
@@ -63,7 +66,9 @@ test: clean ## run tests in the current virtualenv
 
 diff_cover: test ## find diff lines that need test coverage
 	diff-cover coverage.xml
 
-test-all: quality pii_check ## run tests on every supported Python/Django combination
+test-all: clean quality pii_check ## run tests on every supported Python/Django combination
+	tox -e docs
+	rm -rf build  # artifact produced by docs run, interferes with pytest
 	tox
 
 validate: quality pii_check test ## run tests and quality checks
diff --git a/codecov.yml b/codecov.yml
index 4da4768..0470170 100644
--- a/codecov.yml
+++ b/codecov.yml
@@ -7,6 +7,6 @@ coverage:
   patch:
     default:
       enabled: yes
-      target: 100%
+      target: 95%
 
 comment: false
diff --git a/docs/how_tos/manual_testing.rst b/docs/how_tos/manual_testing.rst
new file mode 100644
index 0000000..a56b81e
--- /dev/null
+++ b/docs/how_tos/manual_testing.rst
@@ -0,0 +1,38 @@
+Manual testing
+==============
+
+The producer can be tested manually against a Kafka running in devstack.
+
+#. Create a "unit test" in one of the test files that will actually call Kafka. For example, this could be added to the end of ``edx_event_bus_kafka/publishing/test_event_producer.py``::
+
+     def test_actually_send_to_event_bus():
+         import random
+         signal = openedx_events.learning.signals.SESSION_LOGIN_COMPLETED
+         # Make events distinguishable
+         id = random.randrange(1000)
+         event_data = {
+             'user': UserData(
+                 id=id,
+                 is_active=True,
+                 pii=UserPersonalData(
+                     username=f'foobob_{id:03}',
+                     email='bob@foo.example',
+                     name="Bob Foo",
+                 )
+             )
+         }
+
+         print(f"Sending event with random user ID {id}.")
+         with override_settings(
+             SCHEMA_REGISTRY_URL='http://edx.devstack.schema-registry:8081',
+             KAFKA_BOOTSTRAP_SERVERS='edx.devstack.kafka:29092',
+         ):
+             ep.send_to_event_bus(signal, 'user_stuff', 'user.id', event_data)
+
+#. Make or refresh a copy of this repo where it can be seen from inside devstack: ``rsync -sax --delete ./ ../src/event-bus-kafka/``
+#. In devstack, start Kafka and the control webapp: ``make dev.up.kafka-control-center`` and watch ``make dev.logs.kafka-control-center`` until the server is up and happy (may take a few minutes; watch for ``INFO Kafka startTimeMs``)
+#. Load the control center UI: http://localhost:9021/clusters and wait for the cluster to become healthy
+#. In devstack, run ``make lms-up-without-deps-shell`` to bring up an arbitrary shell inside Docker networking (LMS, in this case)
+#. In the LMS shell, run ``pip install -e /edx/src/event-bus-kafka`` and then run whatever test you want, e.g. ``pytest /edx/src/event-bus-kafka/edx_event_bus_kafka/publishing/test_event_producer.py::test_actually_send_to_event_bus``
+#. Go to the topic that was created and then into the Messages tab; select offset=0 to make sure you can see messages that were sent before you had the UI open.
+#. Rerun ``rsync`` after any edits
diff --git a/docs/index.rst b/docs/index.rst
index 46b6f83..6217502 100644
--- a/docs/index.rst
+++ b/docs/index.rst
@@ -14,6 +14,7 @@ Contents:
    :maxdepth: 2
 
    readme
+   how_tos/manual_testing
    getting_started
    testing
    internationalization
diff --git a/edx_event_bus_kafka/__init__.py b/edx_event_bus_kafka/__init__.py
index 2baefa8..89dbc37 100644
--- a/edx_event_bus_kafka/__init__.py
+++ b/edx_event_bus_kafka/__init__.py
@@ -3,5 +3,3 @@
 """
 
 __version__ = '0.1.0'
-
-default_app_config = 'edx_event_bus_kafka.apps.EdxEventBusKafkaConfig'  # pylint: disable=invalid-name
diff --git a/edx_event_bus_kafka/publishing/__init__.py b/edx_event_bus_kafka/publishing/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/edx_event_bus_kafka/publishing/event_producer.py b/edx_event_bus_kafka/publishing/event_producer.py
new file mode 100644
index 0000000..06c6eff
--- /dev/null
+++ b/edx_event_bus_kafka/publishing/event_producer.py
@@ -0,0 +1,221 @@
+"""
+Produce Kafka events from signals.
+
+Main function is ``send_to_event_bus``.
+"""
+
+import json
+import logging
+import warnings
+from functools import lru_cache
+from typing import Any, List, Optional
+
+from confluent_kafka import SerializingProducer
+from confluent_kafka.schema_registry import SchemaRegistryClient
+from confluent_kafka.schema_registry.avro import AvroSerializer
+from django.conf import settings
+from openedx_events.event_bus.avro.serializer import AvroSignalSerializer
+from openedx_events.tooling import OpenEdxPublicSignal
+
+logger = logging.getLogger(__name__)
+
+# CloudEvent standard name for the event type header, see
+# https://github.com/cloudevents/spec/blob/v1.0.1/kafka-protocol-binding.md#325-example
+EVENT_TYPE_HEADER_KEY = "ce_type"
+
+
+def extract_event_key(event_data: dict, event_key_field: str) -> Any:
+    """
+    From an event object, extract a Kafka event key (not yet serialized).
+
+    Arguments:
+        event_data: The event data (kwargs) sent to the signal
+        event_key_field: Path to the event data field to use as the event key (period-delimited
+            string naming the dictionary keys to descend)
+
+    Returns:
+        Key data, which might be an integer, string, dictionary, etc.
+ """ + field_path = event_key_field.split(".") + current_data = event_data + for field_name in field_path: + if isinstance(current_data, dict): + if field_name not in current_data: + raise Exception( + f"Could not extract key from event; lookup in {event_key_field} " + f"failed at {field_name!r} in dictionary" + ) + current_data = current_data[field_name] + else: + if not hasattr(current_data, field_name): + raise Exception( + f"Could not extract key from event; lookup in {event_key_field} " + f"failed at {field_name!r} in object" + ) + current_data = getattr(current_data, field_name) + return current_data + + +def descend_avro_schema(serializer_schema: dict, field_path: List[str]) -> dict: + """ + Extract a subfield within an Avro schema, recursively. + + Arguments: + serializer_schema: An Avro schema (nested dictionaries) + field_path: List of strings matching the 'name' of successively deeper subfields + + Returns: + Schema for some field + + TODO: Move to openedx_events.event_bus.avro.serializer? + """ + subschema = serializer_schema + for field_name in field_path: + try: + # Either descend into .fields (for dictionaries) or .type.fields (for classes). + if 'fields' not in subschema: + # Descend through .type wrapper first + subschema = subschema['type'] + field_list = subschema['fields'] + + matching = [field for field in field_list if field['name'] == field_name] + subschema = matching[0] + except BaseException as e: + raise Exception( + f"Error traversing Avro schema along path {field_path!r}; failed at {field_name!r}." + ) from e + return subschema + + +def extract_key_schema(signal_serializer: AvroSignalSerializer, event_key_field: str) -> str: + """ + From a signal's serializer, extract just the part of the Avro schema that will be used for the Kafka event key. + + Arguments: + signal_serializer: The signal serializer to extract a sub-schema from + event_key_field: Path to the event data field to use as the event key (period-delimited + string naming the dictionary keys to descend) + + Returns: + The key's schema, as a string. + """ + subschema = descend_avro_schema(signal_serializer.schema, event_key_field.split(".")) + # Same as used by AvroSignalSerializer#schema_string in openedx-events + return json.dumps(subschema, sort_keys=True) + + +@lru_cache +def get_serializer(signal: OpenEdxPublicSignal) -> AvroSignalSerializer: + """ + Get the serializer for a signal. + + This is just defined to allow caching of serializers. + """ + return AvroSignalSerializer(signal) + + +# TODO: Cache this, but in a way that still allows changes to settings +# via remote-config (and in particular does not result in mixed +# cache/uncached configuration). +def get_producer_for_signal(signal: OpenEdxPublicSignal, event_key_field: str) -> Optional[SerializingProducer]: + """ + Create the producer for a signal and a key field path. + + If essential settings are missing or invalid, warn and return None. 
+
+    Arguments:
+        signal: The OpenEdxPublicSignal to make a producer for
+        event_key_field: Path to the event data field to use as the event key (period-delimited
+            string naming the dictionary keys to descend)
+    """
+    if schema_registry_url := getattr(settings, 'SCHEMA_REGISTRY_URL', None):
+        schema_registry_config = {
+            'url': schema_registry_url,
+            'basic.auth.user.info': f"{getattr(settings, 'SCHEMA_REGISTRY_API_KEY', '')}"
+                                    f":{getattr(settings, 'SCHEMA_REGISTRY_API_SECRET', '')}",
+        }
+    else:
+        warnings.warn("Cannot configure event-bus-kafka: Missing setting SCHEMA_REGISTRY_URL")
+        return None
+
+    if bootstrap_servers := getattr(settings, 'KAFKA_BOOTSTRAP_SERVERS', None):
+        producer_settings = {
+            'bootstrap.servers': bootstrap_servers,
+        }
+    else:
+        warnings.warn("Cannot configure event-bus-kafka: Missing setting KAFKA_BOOTSTRAP_SERVERS")
+        return None
+
+    if getattr(settings, 'KAFKA_API_KEY', None) and getattr(settings, 'KAFKA_API_SECRET', None):
+        producer_settings.update({
+            'sasl.mechanism': 'PLAIN',
+            'security.protocol': 'SASL_SSL',
+            'sasl.username': settings.KAFKA_API_KEY,
+            'sasl.password': settings.KAFKA_API_SECRET,
+        })
+
+    schema_registry_client = SchemaRegistryClient(schema_registry_config)
+    signal_serializer = get_serializer(signal)
+
+    def inner_to_dict(event_data, ctx=None):  # pylint: disable=unused-argument
+        """Tells Avro how to turn objects into dictionaries."""
+        return signal_serializer.to_dict(event_data)
+
+    # Serializers for key and value components of Kafka event
+    key_serializer = AvroSerializer(
+        schema_str=extract_key_schema(signal_serializer, event_key_field),
+        schema_registry_client=schema_registry_client,
+        to_dict=inner_to_dict,
+    )
+    value_serializer = AvroSerializer(
+        schema_str=signal_serializer.schema_string(),
+        schema_registry_client=schema_registry_client,
+        to_dict=inner_to_dict,
+    )
+
+    producer_settings.update({
+        'key.serializer': key_serializer,
+        'value.serializer': value_serializer,
+    })
+
+    return SerializingProducer(producer_settings)
+
+
+def on_event_deliver(err, evt):
+    """
+    Simple callback method for debugging event production
+
+    Arguments:
+        err: Error if event production failed
+        evt: Event that was delivered (or failed to be delivered)
+    """
+    if err is not None:
+        logger.warning(f"Event delivery failed: {err!r}")
+    else:
+        # Don't log msg.value() because it may contain userids and/or emails
+        logger.info(f"Event delivered to topic {evt.topic()}; key={evt.key()}; "
+                    f"partition={evt.partition()}")
+
+
+def send_to_event_bus(signal: OpenEdxPublicSignal, topic: str, event_key_field: str, event_data: dict) -> None:
+    """
+    Send a signal event to the event bus under the specified topic.
+
+    If the Kafka settings are missing or invalid, return with a warning.
+
+    Arguments:
+        signal: The original OpenEdxPublicSignal the event was sent to
+        topic: The event bus topic for the event
+        event_key_field: Path to the event data field to use as the event key (period-delimited
+            string naming the dictionary keys to descend)
+        event_data: The event data (kwargs) sent to the signal
+    """
+    producer = get_producer_for_signal(signal, event_key_field)
+    if producer is None:  # Note: SerializingProducer has False truthiness when len() == 0
+        return
+
+    event_key = extract_event_key(event_data, event_key_field)
+    producer.produce(topic, key=event_key, value=event_data,
+                     on_delivery=on_event_deliver,
+                     headers={EVENT_TYPE_HEADER_KEY: signal.event_type})
+    producer.poll()  # wait indefinitely for the above event to either be delivered or fail
diff --git a/edx_event_bus_kafka/publishing/test_event_producer.py b/edx_event_bus_kafka/publishing/test_event_producer.py
new file mode 100644
index 0000000..1f5e4b1
--- /dev/null
+++ b/edx_event_bus_kafka/publishing/test_event_producer.py
@@ -0,0 +1,118 @@
+"""
+Test the event producer code.
+"""
+
+import warnings
+from unittest import TestCase
+from unittest.mock import MagicMock, patch
+
+import openedx_events.learning.signals
+import pytest
+from confluent_kafka import SerializingProducer
+from django.test import override_settings
+from openedx_events.event_bus.avro.serializer import AvroSignalSerializer
+from openedx_events.learning.data import UserData, UserPersonalData
+
+import edx_event_bus_kafka.publishing.event_producer as ep
+
+
+class TestEventProducer(TestCase):
+    """Test producer."""
+
+    def test_extract_event_key(self):
+        event_data = {
+            'user': UserData(
+                id=123,
+                is_active=True,
+                pii=UserPersonalData(
+                    username='foobob',
+                    email='bob@foo.example',
+                    name="Bob Foo",
+                )
+            )
+        }
+
+        assert ep.extract_event_key(event_data, 'user.pii.username') == 'foobob'
+        with pytest.raises(Exception,
+                           match="Could not extract key from event; lookup in xxx failed at 'xxx' in dictionary"):
+            ep.extract_event_key(event_data, 'xxx')
+        with pytest.raises(Exception,
+                           match="Could not extract key from event; lookup in user.xxx failed at 'xxx' in object"):
+            ep.extract_event_key(event_data, 'user.xxx')
+
+    def test_descend_avro_schema(self):
+        signal = openedx_events.learning.signals.SESSION_LOGIN_COMPLETED
+        schema = AvroSignalSerializer(signal).schema
+
+        assert ep.descend_avro_schema(schema, ['user', 'pii', 'username']) == {"name": "username", "type": "string"}
+
+        with pytest.raises(Exception) as excinfo:
+            ep.descend_avro_schema(schema, ['user', 'xxx'])
+        assert excinfo.value.args == ("Error traversing Avro schema along path ['user', 'xxx']; failed at 'xxx'.",)
+        assert isinstance(excinfo.value.__cause__, IndexError)
+
+    def test_extract_key_schema(self):
+        signal = openedx_events.learning.signals.SESSION_LOGIN_COMPLETED
+        schema = ep.extract_key_schema(AvroSignalSerializer(signal), 'user.pii.username')
+        assert schema == '{"name": "username", "type": "string"}'
+
+    def test_get_producer_for_signal(self):
+        signal = openedx_events.learning.signals.SESSION_LOGIN_COMPLETED
+
+        # With missing essential settings, just warn and return None
+        with warnings.catch_warnings(record=True) as caught_warnings:
+            warnings.simplefilter('always')
+            assert ep.get_producer_for_signal(signal, 'user.id') is None
+            assert len(caught_warnings) == 1
+            assert str(caught_warnings[0].message).startswith("Cannot configure event-bus-kafka: Missing setting ")
+
+        # Creation succeeds when all settings are present
+        with override_settings(
+            SCHEMA_REGISTRY_URL='http://localhost:12345',
+            SCHEMA_REGISTRY_API_KEY='some_key',
+            SCHEMA_REGISTRY_API_SECRET='some_secret',
+            KAFKA_BOOTSTRAP_SERVERS='http://localhost:54321',
+            # include these just to maximize code coverage
+            KAFKA_API_KEY='some_other_key',
+            KAFKA_API_SECRET='some_other_secret',
+        ):
+            assert isinstance(ep.get_producer_for_signal(signal, 'user.id'), SerializingProducer)
+
+    @patch('edx_event_bus_kafka.publishing.event_producer.logger')
+    def test_on_event_deliver(self, mock_logger):
+        fake_event = MagicMock()
+        fake_event.topic.return_value = 'some_topic'
+        fake_event.key.return_value = 'some_key'
+        fake_event.partition.return_value = 'some_partition'
+
+        ep.on_event_deliver(Exception("problem!"), fake_event)
+        mock_logger.warning.assert_called_once_with("Event delivery failed: Exception('problem!')")
+
+        ep.on_event_deliver(None, fake_event)
+        mock_logger.info.assert_called_once_with(
+            'Event delivered to topic some_topic; key=some_key; partition=some_partition'
+        )
+
+    def test_send_to_event_bus(self):
+        signal = openedx_events.learning.signals.SESSION_LOGIN_COMPLETED
+        event_data = {
+            'user': UserData(
+                id=123,
+                is_active=True,
+                pii=UserPersonalData(
+                    username='foobob',
+                    email='bob@foo.example',
+                    name="Bob Foo",
+                )
+            )
+        }
+
+        mock_producer = MagicMock()
+        with patch('edx_event_bus_kafka.publishing.event_producer.get_producer_for_signal', return_value=mock_producer):
+            ep.send_to_event_bus(signal, 'user_stuff', 'user.id', event_data)
+
+        mock_producer.produce.assert_called_once_with(
+            'user_stuff', key=123, value=event_data,
+            on_delivery=ep.on_event_deliver,
+            headers={'ce_type': 'org.openedx.learning.auth.session.login.completed.v1'},
+        )
diff --git a/requirements/base.in b/requirements/base.in
index a954780..0c8a2b6 100644
--- a/requirements/base.in
+++ b/requirements/base.in
@@ -1,5 +1,6 @@
 # Core requirements for using this application
 -c constraints.txt
 
-Django # Web application framework
-
+Django # Web application framework
+confluent_kafka[avro,schema-registry] # Kafka client
+openedx-events # Events API
diff --git a/requirements/base.txt b/requirements/base.txt
index ff0a9ce..48fc5fe 100644
--- a/requirements/base.txt
+++ b/requirements/base.txt
@@ -6,11 +6,42 @@
 #
 asgiref==3.5.2
     # via django
-django==3.2.13
+attrs==21.4.0
+    # via openedx-events
+avro==1.10.0
+    # via confluent-kafka
+certifi==2022.6.15
+    # via requests
+charset-normalizer==2.1.0
+    # via requests
+confluent-kafka[avro,schema-registry]==1.9.0
+    # via -r requirements/base.in
+django==3.2.14
     # via
     #   -c https://raw.githubusercontent.com/edx/edx-lint/master/edx_lint/files/common_constraints.txt
     #   -r requirements/base.in
+    #   openedx-events
+edx-opaque-keys[django]==2.3.0
+    # via openedx-events
+fastavro==1.5.2
+    # via
+    #   confluent-kafka
+    #   openedx-events
+idna==3.3
+    # via requests
+openedx-events==0.10.0
+    # via -r requirements/base.in
+pbr==5.9.0
+    # via stevedore
+pymongo==3.12.3
+    # via edx-opaque-keys
 pytz==2022.1
     # via django
+requests==2.28.1
+    # via confluent-kafka
 sqlparse==0.4.2
     # via django
+stevedore==4.0.0
+    # via edx-opaque-keys
+urllib3==1.26.10
+    # via requests
diff --git a/requirements/ci.txt b/requirements/ci.txt
index a80438d..fa2e254 100644
--- a/requirements/ci.txt
+++ b/requirements/ci.txt
@@ -6,11 +6,11 @@
 #
 certifi==2022.6.15
     # via requests
-charset-normalizer==2.0.12
+charset-normalizer==2.1.0
     # via requests
 codecov==2.1.12
     # via -r requirements/ci.in
-coverage==6.4.1
+coverage==6.4.2
     # via codecov
 distlib==0.3.4
     # via virtualenv
@@ -30,7 +30,7 @@ py==1.11.0
     # via tox
 pyparsing==3.0.9
     # via packaging
-requests==2.28.0
+requests==2.28.1
     # via codecov
 six==1.16.0
     # via
@@ -38,9 +38,9 @@ six==1.16.0
     #   virtualenv
 toml==0.10.2
     # via tox
-tox==3.25.0
+tox==3.25.1
     # via -r requirements/ci.in
-urllib3==1.26.9
+urllib3==1.26.10
     # via requests
-virtualenv==20.14.1
+virtualenv==20.15.1
     # via tox
diff --git a/requirements/dev.txt b/requirements/dev.txt
index 052444b..55b6c1d 100644
--- a/requirements/dev.txt
+++ b/requirements/dev.txt
@@ -8,7 +8,7 @@ asgiref==3.5.2
     # via
     #   -r requirements/quality.txt
     #   django
-astroid==2.11.6
+astroid==2.11.7
     # via
     #   -r requirements/quality.txt
     #   pylint
@@ -16,23 +16,32 @@ attrs==21.4.0
     # via
     #   -r requirements/quality.txt
+    #   openedx-events
     #   pytest
-bleach==5.0.0
+avro==1.10.0
+    # via
+    #   -r requirements/quality.txt
+    #   confluent-kafka
+bleach==5.0.1
     # via
     #   -r requirements/quality.txt
     #   readme-renderer
+build==0.8.0
+    # via
+    #   -r requirements/pip-tools.txt
+    #   pip-tools
 certifi==2022.6.15
     # via
     #   -r requirements/ci.txt
     #   -r requirements/quality.txt
     #   requests
-cffi==1.15.0
+cffi==1.15.1
     # via
     #   -r requirements/quality.txt
     #   cryptography
-chardet==4.0.0
+chardet==5.0.0
     # via diff-cover
-charset-normalizer==2.0.12
+charset-normalizer==2.1.0
     # via
     #   -r requirements/ci.txt
     #   -r requirements/quality.txt
@@ -59,17 +68,19 @@ commonmark==0.9.1
     # via
     #   -r requirements/quality.txt
     #   rich
-coverage[toml]==6.4.1
+confluent-kafka[avro,schema-registry]==1.9.0
+    # via -r requirements/quality.txt
+coverage[toml]==6.4.2
     # via
     #   -r requirements/ci.txt
     #   -r requirements/quality.txt
     #   codecov
     #   pytest-cov
-cryptography==37.0.2
+cryptography==37.0.4
     # via
     #   -r requirements/quality.txt
     #   secretstorage
-diff-cover==6.5.0
+diff-cover==6.5.1
     # via -r requirements/dev.in
 dill==0.3.5.1
     # via
@@ -79,12 +90,13 @@ distlib==0.3.4
     # via
     #   -r requirements/ci.txt
     #   virtualenv
-django==3.2.13
+django==3.2.14
     # via
     #   -c https://raw.githubusercontent.com/edx/edx-lint/master/edx_lint/files/common_constraints.txt
     #   -r requirements/quality.txt
     #   edx-i18n-tools
-docutils==0.18.1
+    #   openedx-events
+docutils==0.19
     # via
     #   -r requirements/quality.txt
     #   readme-renderer
@@ -92,6 +104,15 @@ edx-i18n-tools==0.9.1
     # via -r requirements/dev.in
 edx-lint==5.2.4
     # via -r requirements/quality.txt
+edx-opaque-keys[django]==2.3.0
+    # via
+    #   -r requirements/quality.txt
+    #   openedx-events
+fastavro==1.5.2
+    # via
+    #   -r requirements/quality.txt
+    #   confluent-kafka
+    #   openedx-events
 filelock==3.7.1
     # via
     #   -r requirements/ci.txt
@@ -102,7 +123,7 @@ idna==3.3
     #   -r requirements/ci.txt
     #   -r requirements/quality.txt
     #   requests
-importlib-metadata==4.11.4
+importlib-metadata==4.12.0
     # via
     #   -r requirements/quality.txt
     #   keyring
@@ -141,10 +162,14 @@ mccabe==0.7.0
     # via
     #   -r requirements/quality.txt
     #   pylint
+openedx-events==0.10.0
+    # via -r requirements/quality.txt
 packaging==21.3
     # via
     #   -r requirements/ci.txt
+    #   -r requirements/pip-tools.txt
     #   -r requirements/quality.txt
+    #   build
     #   pytest
     #   tox
 path==16.4.0
@@ -156,8 +181,8 @@ pbr==5.9.0
 pep517==0.12.0
     # via
     #   -r requirements/pip-tools.txt
-    #   pip-tools
-pip-tools==6.6.2
+    #   build
+pip-tools==6.8.0
     # via -r requirements/pip-tools.txt
 pkginfo==1.8.3
     # via
@@ -198,7 +223,7 @@ pygments==2.12.0
     #   diff-cover
     #   readme-renderer
     #   rich
-pylint==2.14.2
+pylint==2.14.4
     # via
     #   -r requirements/quality.txt
     #   edx-lint
@@ -218,9 +243,14 @@ pylint-plugin-utils==0.7
     #   -r requirements/quality.txt
     #   pylint-celery
     #   pylint-django
+pymongo==3.12.3
+    # via
+    #   -r requirements/quality.txt
+    #   edx-opaque-keys
 pyparsing==3.0.9
     # via
     #   -r requirements/ci.txt
+    #   -r requirements/pip-tools.txt
     #   -r requirements/quality.txt
     #   packaging
 pytest==7.1.2
@@ -249,11 +279,12 @@ readme-renderer==35.0
     # via
     #   -r requirements/quality.txt
     #   twine
-requests==2.28.0
+requests==2.28.1
     # via
     #   -r requirements/ci.txt
     #   -r requirements/quality.txt
     #   codecov
+    #   confluent-kafka
     #   requests-toolbelt
     #   twine
 requests-toolbelt==0.9.1
@@ -264,7 +295,7 @@ rfc3986==2.0.0
     # via
     #   -r requirements/quality.txt
     #   twine
-rich==12.4.4
+rich==12.5.1
     # via
     #   -r requirements/quality.txt
     #   twine
@@ -288,10 +319,11 @@ sqlparse==0.4.2
     # via
     #   -r requirements/quality.txt
     #   django
-stevedore==3.5.0
+stevedore==4.0.0
     # via
     #   -r requirements/quality.txt
     #   code-annotations
+    #   edx-opaque-keys
 text-unidecode==1.3
     # via
     #   -r requirements/quality.txt
@@ -304,15 +336,16 @@ tomli==2.0.1
     # via
     #   -r requirements/pip-tools.txt
     #   -r requirements/quality.txt
+    #   build
     #   coverage
     #   pep517
     #   pylint
     #   pytest
-tomlkit==0.11.0
+tomlkit==0.11.1
     # via
     #   -r requirements/quality.txt
     #   pylint
-tox==3.25.0
+tox==3.25.1
     # via
     #   -r requirements/ci.txt
     #   tox-battery
@@ -320,19 +353,19 @@ tox-battery==0.6.1
     # via -r requirements/dev.in
 twine==4.0.1
     # via -r requirements/quality.txt
-typing-extensions==4.2.0
+typing-extensions==4.3.0
     # via
     #   -r requirements/quality.txt
     #   astroid
     #   pylint
     #   rich
-urllib3==1.26.9
+urllib3==1.26.10
     # via
     #   -r requirements/ci.txt
     #   -r requirements/quality.txt
     #   requests
     #   twine
-virtualenv==20.14.1
+virtualenv==20.15.1
     # via
     #   -r requirements/ci.txt
     #   tox
@@ -348,7 +381,7 @@ wrapt==1.14.1
     # via
     #   -r requirements/quality.txt
     #   astroid
-zipp==3.8.0
+zipp==3.8.1
     # via
     #   -r requirements/quality.txt
     #   importlib-metadata
diff --git a/requirements/doc.txt b/requirements/doc.txt
index f6ccb2b..338c9bb 100644
--- a/requirements/doc.txt
+++ b/requirements/doc.txt
@@ -13,19 +13,28 @@ asgiref==3.5.2
 attrs==21.4.0
     # via
     #   -r requirements/test.txt
+    #   openedx-events
     #   pytest
+avro==1.10.0
+    # via
+    #   -r requirements/test.txt
+    #   confluent-kafka
 babel==2.10.3
     # via sphinx
-bleach==5.0.0
+bleach==5.0.1
     # via readme-renderer
 build==0.8.0
     # via -r requirements/doc.in
 certifi==2022.6.15
-    # via requests
-cffi==1.15.0
+    # via
+    #   -r requirements/test.txt
+    #   requests
+cffi==1.15.1
     # via cryptography
-charset-normalizer==2.0.12
-    # via requests
+charset-normalizer==2.1.0
+    # via
+    #   -r requirements/test.txt
+    #   requests
 click==8.1.3
     # via
     #   -r requirements/test.txt
@@ -34,16 +43,19 @@ code-annotations==1.3.0
     # via -r requirements/test.txt
 commonmark==0.9.1
     # via rich
-coverage[toml]==6.4.1
+confluent-kafka[avro,schema-registry]==1.9.0
+    # via -r requirements/test.txt
+coverage[toml]==6.4.2
     # via
     #   -r requirements/test.txt
     #   pytest-cov
-cryptography==37.0.2
+cryptography==37.0.4
     # via secretstorage
-django==3.2.13
+django==3.2.14
     # via
     #   -c https://raw.githubusercontent.com/edx/edx-lint/master/edx_lint/files/common_constraints.txt
     #   -r requirements/test.txt
+    #   openedx-events
 doc8==0.11.2
     # via -r requirements/doc.in
 docutils==0.18.1
     # via
@@ -52,13 +64,24 @@ docutils==0.18.1
     #   readme-renderer
     #   restructuredtext-lint
     #   sphinx
+edx-opaque-keys[django]==2.3.0
+    # via
+    #   -r requirements/test.txt
+    #   openedx-events
 edx-sphinx-theme==3.0.0
     # via -r requirements/doc.in
+fastavro==1.5.2
+    # via
+    #   -r requirements/test.txt
+    #   confluent-kafka
+    #   openedx-events
 idna==3.3
-    # via requests
-imagesize==1.3.0
+    # via
+    #   -r requirements/test.txt
+    #   requests
+imagesize==1.4.1
     # via sphinx
-importlib-metadata==4.11.4
+importlib-metadata==4.12.0
     # via
     #   keyring
     #   sphinx
@@ -82,6 +105,8 @@ markupsafe==2.1.1
     # via
     #   -r requirements/test.txt
     #   jinja2
+openedx-events==0.10.0
+    # via -r requirements/test.txt
 packaging==21.3
     # via
     #   -r requirements/test.txt
@@ -112,6 +137,10 @@ pygments==2.12.0
     #   readme-renderer
     #   rich
     #   sphinx
+pymongo==3.12.3
+    # via
+    #   -r requirements/test.txt
+    #   edx-opaque-keys
 pyparsing==3.0.9
     # via
     #   -r requirements/test.txt
@@ -140,8 +169,10 @@ pyyaml==6.0
     #   code-annotations
 readme-renderer==35.0
     # via twine
-requests==2.28.0
+requests==2.28.1
     # via
+    #   -r requirements/test.txt
+    #   confluent-kafka
     #   requests-toolbelt
     #   sphinx
     #   twine
@@ -151,7 +182,7 @@ restructuredtext-lint==1.4.0
     # via doc8
 rfc3986==2.0.0
     # via twine
-rich==12.4.4
+rich==12.5.1
     # via twine
 secretstorage==3.3.2
     # via keyring
@@ -181,11 +212,12 @@ sqlparse==0.4.2
     # via
     #   -r requirements/test.txt
     #   django
-stevedore==3.5.0
+stevedore==4.0.0
     # via
     #   -r requirements/test.txt
     #   code-annotations
     #   doc8
+    #   edx-opaque-keys
 text-unidecode==1.3
     # via
     #   -r requirements/test.txt
@@ -199,13 +231,14 @@ tomli==2.0.1
     #   pytest
 twine==4.0.1
     # via -r requirements/doc.in
-typing-extensions==4.2.0
+typing-extensions==4.3.0
     # via rich
-urllib3==1.26.9
+urllib3==1.26.10
     # via
+    #   -r requirements/test.txt
     #   requests
     #   twine
 webencodings==0.5.1
     # via bleach
-zipp==3.8.0
+zipp==3.8.1
     # via importlib-metadata
diff --git a/requirements/pip-tools.txt b/requirements/pip-tools.txt
index ce56f4c..f49a9d4 100644
--- a/requirements/pip-tools.txt
+++ b/requirements/pip-tools.txt
@@ -4,14 +4,22 @@
 #
 #    make upgrade
 #
+build==0.8.0
+    # via pip-tools
 click==8.1.3
     # via pip-tools
+packaging==21.3
+    # via build
 pep517==0.12.0
-    # via pip-tools
-pip-tools==6.6.2
+    # via build
+pip-tools==6.8.0
     # via -r requirements/pip-tools.in
+pyparsing==3.0.9
+    # via packaging
 tomli==2.0.1
-    # via pep517
+    # via
+    #   build
+    #   pep517
 wheel==0.37.1
     # via pip-tools
diff --git a/requirements/pip.txt b/requirements/pip.txt
index 884d847..16c2b5f 100644
--- a/requirements/pip.txt
+++ b/requirements/pip.txt
@@ -1,14 +1,14 @@
 #
-# This file is autogenerated by pip-compile
+# This file is autogenerated by pip-compile with python 3.8
 # To update, run:
 #
 #    make upgrade
 #
-wheel==0.36.2
+wheel==0.37.1
     # via -r requirements/pip.in
 
 # The following packages are considered to be unsafe in a requirements file:
-pip==20.3.3
+pip==22.1.2
     # via -r requirements/pip.in
-setuptools==51.3.3
+setuptools==63.1.0
     # via -r requirements/pip.in
diff --git a/requirements/quality.txt b/requirements/quality.txt
index fb78422..f14a6fa 100644
--- a/requirements/quality.txt
+++ b/requirements/quality.txt
@@ -8,22 +8,31 @@ asgiref==3.5.2
     # via
     #   -r requirements/test.txt
     #   django
-astroid==2.11.6
+astroid==2.11.7
     # via
     #   pylint
     #   pylint-celery
 attrs==21.4.0
     # via
     #   -r requirements/test.txt
+    #   openedx-events
     #   pytest
-bleach==5.0.0
+avro==1.10.0
+    # via
+    #   -r requirements/test.txt
+    #   confluent-kafka
+bleach==5.0.1
     # via readme-renderer
 certifi==2022.6.15
-    # via requests
-cffi==1.15.0
+    # via
+    #   -r requirements/test.txt
+    #   requests
+cffi==1.15.1
     # via cryptography
-charset-normalizer==2.0.12
-    # via requests
+charset-normalizer==2.1.0
+    # via
+    #   -r requirements/test.txt
+    #   requests
 click==8.1.3
     # via
     #   -r requirements/test.txt
@@ -38,25 +47,39 @@ code-annotations==1.3.0
     #   edx-lint
 commonmark==0.9.1
     # via rich
-coverage[toml]==6.4.1
+confluent-kafka[avro,schema-registry]==1.9.0
+    # via -r requirements/test.txt
+coverage[toml]==6.4.2
     # via
     #   -r requirements/test.txt
     #   pytest-cov
-cryptography==37.0.2
+cryptography==37.0.4
     # via secretstorage
 dill==0.3.5.1
     # via pylint
-django==3.2.13
+django==3.2.14
     # via
     #   -c https://raw.githubusercontent.com/edx/edx-lint/master/edx_lint/files/common_constraints.txt
     #   -r requirements/test.txt
-docutils==0.18.1
+    #   openedx-events
+docutils==0.19
     # via readme-renderer
 edx-lint==5.2.4
     # via -r requirements/quality.in
+edx-opaque-keys[django]==2.3.0
+    # via
+    #   -r requirements/test.txt
+    #   openedx-events
+fastavro==1.5.2
+    # via
+    #   -r requirements/test.txt
+    #   confluent-kafka
+    #   openedx-events
 idna==3.3
-    # via requests
-importlib-metadata==4.11.4
+    # via
+    #   -r requirements/test.txt
+    #   requests
+importlib-metadata==4.12.0
     # via
     #   keyring
     #   twine
@@ -86,6 +109,8 @@ markupsafe==2.1.1
     #   jinja2
 mccabe==0.7.0
     # via pylint
+openedx-events==0.10.0
+    # via -r requirements/test.txt
 packaging==21.3
     # via
     #   -r requirements/test.txt
@@ -116,7 +141,7 @@ pygments==2.12.0
     # via
     #   readme-renderer
     #   rich
-pylint==2.14.2
+pylint==2.14.4
     # via
     #   edx-lint
     #   pylint-celery
@@ -130,6 +155,10 @@ pylint-plugin-utils==0.7
     # via
     #   pylint-celery
     #   pylint-django
+pymongo==3.12.3
+    # via
+    #   -r requirements/test.txt
+    #   edx-opaque-keys
 pyparsing==3.0.9
     # via
     #   -r requirements/test.txt
@@ -157,15 +186,17 @@ pyyaml==6.0
     #   code-annotations
 readme-renderer==35.0
     # via twine
-requests==2.28.0
+requests==2.28.1
     # via
+    #   -r requirements/test.txt
+    #   confluent-kafka
     #   requests-toolbelt
     #   twine
 requests-toolbelt==0.9.1
     # via twine
 rfc3986==2.0.0
     # via twine
-rich==12.4.4
+rich==12.5.1
     # via twine
 secretstorage==3.3.2
     # via keyring
@@ -179,10 +210,11 @@ sqlparse==0.4.2
     # via
     #   -r requirements/test.txt
     #   django
-stevedore==3.5.0
+stevedore==4.0.0
     # via
     #   -r requirements/test.txt
     #   code-annotations
+    #   edx-opaque-keys
 text-unidecode==1.3
     # via
     #   -r requirements/test.txt
@@ -193,24 +225,25 @@ tomli==2.0.1
     #   coverage
     #   pylint
     #   pytest
-tomlkit==0.11.0
+tomlkit==0.11.1
     # via pylint
 twine==4.0.1
     # via -r requirements/quality.in
-typing-extensions==4.2.0
+typing-extensions==4.3.0
     # via
     #   astroid
     #   pylint
     #   rich
-urllib3==1.26.9
+urllib3==1.26.10
     # via
+    #   -r requirements/test.txt
     #   requests
     #   twine
 webencodings==0.5.1
     # via bleach
 wrapt==1.14.1
     # via astroid
-zipp==3.8.0
+zipp==3.8.1
     # via importlib-metadata
 
 # The following packages are considered to be unsafe in a requirements file:
diff --git a/requirements/test.txt b/requirements/test.txt
index ad03b97..49cee9e 100644
--- a/requirements/test.txt
+++ b/requirements/test.txt
@@ -9,30 +9,69 @@ asgiref==3.5.2
     #   -r requirements/base.txt
     #   django
 attrs==21.4.0
-    # via pytest
+    # via
+    #   -r requirements/base.txt
+    #   openedx-events
+    #   pytest
+avro==1.10.0
+    # via
+    #   -r requirements/base.txt
+    #   confluent-kafka
+certifi==2022.6.15
+    # via
+    #   -r requirements/base.txt
+    #   requests
+charset-normalizer==2.1.0
+    # via
+    #   -r requirements/base.txt
+    #   requests
 click==8.1.3
     # via code-annotations
 code-annotations==1.3.0
     # via -r requirements/test.in
-coverage[toml]==6.4.1
+confluent-kafka[avro,schema-registry]==1.9.0
+    # via -r requirements/base.txt
+coverage[toml]==6.4.2
     # via pytest-cov
-django==3.2.13
+django==3.2.14
     # via
     #   -c https://raw.githubusercontent.com/edx/edx-lint/master/edx_lint/files/common_constraints.txt
     #   -r requirements/base.txt
+    #   openedx-events
+edx-opaque-keys[django]==2.3.0
+    # via
+    #   -r requirements/base.txt
+    #   openedx-events
+fastavro==1.5.2
+    # via
+    #   -r requirements/base.txt
+    #   confluent-kafka
+    #   openedx-events
+idna==3.3
+    # via
+    #   -r requirements/base.txt
+    #   requests
 iniconfig==1.1.1
     # via pytest
 jinja2==3.1.2
     # via code-annotations
 markupsafe==2.1.1
     # via jinja2
+openedx-events==0.10.0
+    # via -r requirements/base.txt
 packaging==21.3
     # via pytest
 pbr==5.9.0
-    # via stevedore
+    # via
+    #   -r requirements/base.txt
+    #   stevedore
 pluggy==1.0.0
     # via pytest
 py==1.11.0
     # via pytest
+pymongo==3.12.3
+    # via
+    #   -r requirements/base.txt
+    #   edx-opaque-keys
 pyparsing==3.0.9
     # via packaging
 pytest==7.1.2
@@ -51,15 +90,26 @@ pytz==2022.1
     #   django
 pyyaml==6.0
     # via code-annotations
+requests==2.28.1
+    # via
+    #   -r requirements/base.txt
+    #   confluent-kafka
 sqlparse==0.4.2
     # via
     #   -r requirements/base.txt
     #   django
-stevedore==3.5.0
-    # via code-annotations
+stevedore==4.0.0
+    # via
+    #   -r requirements/base.txt
+    #   code-annotations
+    #   edx-opaque-keys
 text-unidecode==1.3
     # via python-slugify
 tomli==2.0.1
     # via
     #   coverage
     #   pytest
+urllib3==1.26.10
+    # via
+    #   -r requirements/base.txt
+    #   requests
diff --git a/tox.ini b/tox.ini
index d7ad330..f3ba80a 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,5 +1,5 @@
 [tox]
-envlist = py38-django{32,40}, quality, docs, pii_check
+envlist = py38-django{32,40}
 
 [doc8]
 ; D001 = Line too long
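
A minimal sketch of how a host service might wire the producer added in this diff to a signal. This wiring is an assumption for illustration, not part of the change: the receiver function is hypothetical, and the ``'user_stuff'`` topic and ``'user.id'`` key field simply mirror the manual-testing example above::

    # Hypothetical receiver in a host service such as the LMS, assuming the
    # SCHEMA_REGISTRY_URL and KAFKA_BOOTSTRAP_SERVERS settings are configured.
    from django.dispatch import receiver
    from openedx_events.learning.signals import SESSION_LOGIN_COMPLETED

    from edx_event_bus_kafka.publishing.event_producer import send_to_event_bus

    @receiver(SESSION_LOGIN_COMPLETED)
    def forward_login_event_to_bus(sender, user, **kwargs):  # pylint: disable=unused-argument
        # send_to_event_bus just warns and returns if the Kafka settings are
        # missing, so it is safe to call unconditionally.
        send_to_event_bus(SESSION_LOGIN_COMPLETED, 'user_stuff', 'user.id', {'user': user})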