From ba6c4019de744c5dbcafaa3188a188cb50593897 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Fri, 29 Sep 2023 10:12:57 +0200 Subject: [PATCH 01/58] chore: support for cdk watch to speedup dev Signed-off-by: heitorlessa --- cdk.json | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/cdk.json b/cdk.json index 9f78038..7582b71 100644 --- a/cdk.json +++ b/cdk.json @@ -1,3 +1,11 @@ { - "app": "poetry run python app.py" + "app": "poetry run python app.py", + "watch": { + "include": "product/**", + "exclude": [ + "product/**/*.pyc", + "product/**/__pycache__" + ] + }, + "build": "make build" } \ No newline at end of file From 01bb8000147337b13d3feadb3c9a68d0a138c2ae Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Fri, 29 Sep 2023 10:39:42 +0200 Subject: [PATCH 02/58] fix: ensure pytest only searches tests folder Signed-off-by: heitorlessa --- pyproject.toml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/pyproject.toml b/pyproject.toml index e7bdd51..3098524 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -74,3 +74,6 @@ skip = [ [tool.yapfignore] ignore_patterns = [".git", ".venv", ".build", "cdk.out", "node_modules"] + +[tool.pytest.ini_options] +testpaths = "tests" From a452f8667a2c44eae76f5b04f69576757614f38d Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Fri, 29 Sep 2023 10:39:57 +0200 Subject: [PATCH 03/58] chore: cdk watch for infra too Signed-off-by: heitorlessa --- cdk.json | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/cdk.json b/cdk.json index 7582b71..681e70a 100644 --- a/cdk.json +++ b/cdk.json @@ -1,10 +1,15 @@ { "app": "poetry run python app.py", "watch": { - "include": "product/**", + "include": [ + "product/**", + "infrastructure/product/**" + ], "exclude": [ "product/**/*.pyc", - "product/**/__pycache__" + "product/**/__pycache__", + "infrastructure/**/*.pyc", + "infrastructure/**/__pycache__" ] }, "build": "make build" From 57fc38e0cdc41f9901bc50a424ea0766e6a93764 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Fri, 29 Sep 2023 11:01:34 +0200 Subject: [PATCH 04/58] chore: use getpass over outdated getlogin Signed-off-by: heitorlessa --- infrastructure/product/utils.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/infrastructure/product/utils.py b/infrastructure/product/utils.py index 0b2f13b..fdeb1c8 100644 --- a/infrastructure/product/utils.py +++ b/infrastructure/product/utils.py @@ -1,4 +1,4 @@ -import os +import getpass from pathlib import Path from git import Repo @@ -8,7 +8,7 @@ def get_username() -> str: try: - return os.getlogin().replace('.', '-') + return getpass.getuser().replace('.', '-') except Exception: return 'github' From 880834e99fe91883360f5db87c1f2b16c7533c40 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Fri, 29 Sep 2023 11:01:52 +0200 Subject: [PATCH 05/58] chore: allow branches with _ Signed-off-by: heitorlessa --- infrastructure/product/utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/infrastructure/product/utils.py b/infrastructure/product/utils.py index fdeb1c8..9af7902 100644 --- a/infrastructure/product/utils.py +++ b/infrastructure/product/utils.py @@ -17,7 +17,7 @@ def get_stack_name() -> str: repo = Repo(Path.cwd()) username = get_username() try: - branch_name = f'{repo.active_branch}'.replace('/', '-') + branch_name = f'{repo.active_branch}'.replace('/', '-').replace('_', '-') return f'{username}-{branch_name}-{constants.SERVICE_NAME}' except TypeError: # we're running in detached mode (HEAD) From 
6d5fa6d8843d56586719a920956751f522450e06 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Fri, 29 Sep 2023 11:05:41 +0200 Subject: [PATCH 06/58] chore: standardize stream resource names Signed-off-by: heitorlessa --- infrastructure/product/constants.py | 8 +++++--- infrastructure/product/service_stack.py | 2 +- .../product/stream_processor_construct.py | 14 +++++++------- 3 files changed, 13 insertions(+), 11 deletions(-) diff --git a/infrastructure/product/constants.py b/infrastructure/product/constants.py index b37ac06..ca2c326 100644 --- a/infrastructure/product/constants.py +++ b/infrastructure/product/constants.py @@ -1,5 +1,4 @@ SERVICE_ROLE_ARN = 'ServiceRoleArn' -STREAM_PROC_SERVICE_ROLE_ARN = 'StreamRoleArn' LAMBDA_BASIC_EXECUTION_ROLE = 'AWSLambdaBasicExecutionRole' CREATE_PRODUCT_ROLE = 'ServiceRole' DELETE_PRODUCT_ROLE = 'DeleteRole' @@ -28,8 +27,11 @@ BUILD_FOLDER = '.build/lambdas/' COMMON_LAYER_BUILD_FOLDER = '.build/common_layer' CRUD_CONSTRUCT_NAME = 'Crud' -STREAM_PROC_CONSTRUCT_NAME = 'StreamProc' OWNER_TAG = 'owner' REST_API_NAME = 'crud-rest-api' EVENT_BUS_NAME = 'events' -STREAM_PROC_LAMBDA = 'Stream' +STREAM_PROCESSOR_CONSTRUCT_NAME = 'StreamProc' +STREAM_PROCESSOR_LAMBDA = 'StreamProcessor' +STREAM_PROCESSOR_LAMBDA_MEMORY_SIZE = 128 # MB +STREAM_PROCESSOR_LAMBDA_TIMEOUT = 120 # seconds +STREAM_PROCESSOR_LAMBDA_SERVICE_ROLE_ARN = 'StreamRoleArn' diff --git a/infrastructure/product/service_stack.py b/infrastructure/product/service_stack.py index ee23fb3..c966531 100644 --- a/infrastructure/product/service_stack.py +++ b/infrastructure/product/service_stack.py @@ -25,7 +25,7 @@ def __init__(self, scope: Construct, id: str, **kwargs) -> None: self.stream_processor = StreamProcessorConstruct( self, - id_=get_construct_name(id, constants.STREAM_PROC_CONSTRUCT_NAME), + id_=constants.STREAM_PROCESSOR_CONSTRUCT_NAME, lambda_layer=self.shared_layer, dynamodb_table=self.api.api_db.db, ) diff --git a/infrastructure/product/stream_processor_construct.py b/infrastructure/product/stream_processor_construct.py index 0200375..519f600 100644 --- a/infrastructure/product/stream_processor_construct.py +++ b/infrastructure/product/stream_processor_construct.py @@ -17,14 +17,14 @@ def __init__(self, scope: Construct, id_: str, lambda_layer: PythonLayerVersion, super().__init__(scope, id_) bus_name = f'{id_}{constants.EVENT_BUS_NAME}' self.event_bus = events.EventBus(self, bus_name, event_bus_name=bus_name) - self.role = self._build_lambda_role(dynamodb_table, self.event_bus) + self.role = self._build_lambda_role(db=dynamodb_table, bus=self.event_bus) self.lambda_function = self._build_stream_processor_lambda(self.role, lambda_layer, dynamodb_table) def _build_lambda_role(self, db: dynamodb.Table, bus: events.EventBus) -> iam.Role: return iam.Role( self, - constants.STREAM_PROC_SERVICE_ROLE_ARN, + id=constants.STREAM_PROCESSOR_LAMBDA_SERVICE_ROLE_ARN, assumed_by=iam.ServicePrincipal('lambda.amazonaws.com'), inline_policies={ 'streams': @@ -51,21 +51,21 @@ def _build_lambda_role(self, db: dynamodb.Table, bus: events.EventBus) -> iam.Ro def _build_stream_processor_lambda(self, role: iam.Role, lambda_layer: PythonLayerVersion, dynamodb_table: dynamodb.Table) -> _lambda.Function: lambda_function = _lambda.Function( self, - constants.STREAM_PROC_LAMBDA, + id=constants.STREAM_PROCESSOR_LAMBDA, runtime=_lambda.Runtime.PYTHON_3_11, code=_lambda.Code.from_asset(constants.BUILD_FOLDER), - handler='product.stream_processor.handlers.stream_handler.handle_events', + 
handler='product.stream_processor.handlers.stream_handler.process_stream', environment={ constants.POWERTOOLS_SERVICE_NAME: constants.SERVICE_NAME, # for logger, tracer and metrics constants.POWER_TOOLS_LOG_LEVEL: 'DEBUG', # for logger }, tracing=_lambda.Tracing.ACTIVE, retry_attempts=0, - timeout=Duration.seconds(constants.API_HANDLER_LAMBDA_TIMEOUT), - memory_size=constants.API_HANDLER_LAMBDA_MEMORY_SIZE, + timeout=Duration.seconds(constants.STREAM_PROCESSOR_LAMBDA_TIMEOUT), + memory_size=constants.STREAM_PROCESSOR_LAMBDA_MEMORY_SIZE, layers=[lambda_layer], role=role, - log_retention=RetentionDays.ONE_DAY, + log_retention=RetentionDays.FIVE_DAYS, ) # Add DynamoDB Stream as an event source for the Lambda function lambda_function.add_event_source(DynamoEventSource(dynamodb_table, starting_position=_lambda.StartingPosition.LATEST)) From 409b58eb60e38456988b2d01575b185569ce4d77 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Fri, 29 Sep 2023 11:06:24 +0200 Subject: [PATCH 07/58] chore: add hello world stream proc lambda Signed-off-by: heitorlessa --- product/stream_processor/handlers/stream_handler.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/product/stream_processor/handlers/stream_handler.py b/product/stream_processor/handlers/stream_handler.py index e69de29..1714402 100644 --- a/product/stream_processor/handlers/stream_handler.py +++ b/product/stream_processor/handlers/stream_handler.py @@ -0,0 +1,9 @@ +from typing import Any +from aws_lambda_powertools.utilities.typing import LambdaContext +from aws_lambda_powertools import Logger + +logger = Logger() + +@logger.inject_lambda_context(log_event=True) +def process_stream(event: dict[str, Any], context: LambdaContext): + return 'Hello from the stream!' From afbac00c9bd6f92b70be1a993f96429288b80848 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Sat, 30 Sep 2023 15:41:16 +0200 Subject: [PATCH 08/58] chore: add watch target in makefile Signed-off-by: heitorlessa --- Makefile | 3 +++ 1 file changed, 3 insertions(+) diff --git a/Makefile b/Makefile index 9c8c43e..b4715c2 100644 --- a/Makefile +++ b/Makefile @@ -68,3 +68,6 @@ docs: lint-docs: docker run -v ${PWD}:/markdown 06kellyjac/markdownlint-cli --fix "docs" + +watch: + npx cdk watch From 8d7dc71a048003b094d77e5eea2317ac88933540 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Sat, 30 Sep 2023 17:19:33 +0200 Subject: [PATCH 09/58] chore: make product models independent Signed-off-by: heitorlessa --- product/models/__init__.py | 0 product/models/products/__init__.py | 0 product/models/products/product.py | 16 ++++++++++++++++ product/models/products/validators.py | 9 +++++++++ 4 files changed, 25 insertions(+) create mode 100644 product/models/__init__.py create mode 100644 product/models/products/__init__.py create mode 100644 product/models/products/product.py create mode 100644 product/models/products/validators.py diff --git a/product/models/__init__.py b/product/models/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/product/models/products/__init__.py b/product/models/products/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/product/models/products/product.py b/product/models/products/product.py new file mode 100644 index 0000000..a517513 --- /dev/null +++ b/product/models/products/product.py @@ -0,0 +1,16 @@ +from typing import Annotated + +from pydantic import BaseModel, Field, PositiveInt +from pydantic.functional_validators import AfterValidator + +from product.models.products.validators import validate_product_id + 
+ProductId = Annotated[ + str, Field(min_length=36, max_length=36), AfterValidator(validate_product_id) +] + + +class Product(BaseModel): + name: Annotated[str, Field(min_length=1, max_length=30)] + id: ProductId + price: PositiveInt diff --git a/product/models/products/validators.py b/product/models/products/validators.py new file mode 100644 index 0000000..5c5ae49 --- /dev/null +++ b/product/models/products/validators.py @@ -0,0 +1,9 @@ +from uuid import UUID + + +def validate_product_id(product_id: str) -> str: + try: + UUID(product_id, version=4) + except Exception as exc: + raise ValueError(str(exc)) from exc + return product_id From 1821ef914e33d14ca1913c4af3358dbb9420e023 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Sat, 30 Sep 2023 18:50:05 +0200 Subject: [PATCH 10/58] chore: remove stream specific schema for now Signed-off-by: heitorlessa --- product/stream_processor/schemas/__init__.py | 0 product/stream_processor/schemas/input.py | 0 product/stream_processor/schemas/output.py | 0 3 files changed, 0 insertions(+), 0 deletions(-) delete mode 100644 product/stream_processor/schemas/__init__.py delete mode 100644 product/stream_processor/schemas/input.py delete mode 100644 product/stream_processor/schemas/output.py diff --git a/product/stream_processor/schemas/__init__.py b/product/stream_processor/schemas/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/product/stream_processor/schemas/input.py b/product/stream_processor/schemas/input.py deleted file mode 100644 index e69de29..0000000 diff --git a/product/stream_processor/schemas/output.py b/product/stream_processor/schemas/output.py deleted file mode 100644 index e69de29..0000000 From 956a2e155ee0a5c252cd4431d102aac3c7875bf1 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Sat, 30 Sep 2023 18:53:27 +0200 Subject: [PATCH 11/58] chore(domain): create skeleton to notify updates Signed-off-by: heitorlessa --- product/models/products/product.py | 12 ++++++++++-- .../domain_logic/product_notification.py | 12 ++++++++++++ 2 files changed, 22 insertions(+), 2 deletions(-) create mode 100644 product/stream_processor/domain_logic/product_notification.py diff --git a/product/models/products/product.py b/product/models/products/product.py index a517513..1669fc3 100644 --- a/product/models/products/product.py +++ b/product/models/products/product.py @@ -1,4 +1,4 @@ -from typing import Annotated +from typing import Annotated, Literal from pydantic import BaseModel, Field, PositiveInt from pydantic.functional_validators import AfterValidator @@ -9,8 +9,16 @@ str, Field(min_length=36, max_length=36), AfterValidator(validate_product_id) ] +ProductName = Annotated[str, Field(min_length=1, max_length=30)] + class Product(BaseModel): - name: Annotated[str, Field(min_length=1, max_length=30)] + name: ProductName id: ProductId price: PositiveInt + + +class ProductNotification(BaseModel): + product_id: ProductId + product_name: ProductName + change_status: Literal['ADDED', 'REMOVED', 'UPDATED'] diff --git a/product/stream_processor/domain_logic/product_notification.py b/product/stream_processor/domain_logic/product_notification.py new file mode 100644 index 0000000..840d9d9 --- /dev/null +++ b/product/stream_processor/domain_logic/product_notification.py @@ -0,0 +1,12 @@ +from typing import TypeVar + +from product.models.products.product import ProductNotification + +# Until DAL gets created +EventHandler = TypeVar('EventHandler') + + +def notify_product_updates( + update: list[ProductNotification], event_handler: 
EventHandler +): + ... From 657bdb873719f37e7f3cf6c0e118d719160443a1 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Sat, 30 Sep 2023 20:29:26 +0200 Subject: [PATCH 12/58] chore: add placeholder code for handler Signed-off-by: heitorlessa --- product/models/products/product.py | 7 ++- .../domain_logic/product_notification.py | 4 +- .../handlers/stream_handler.py | 43 +++++++++++++++++-- 3 files changed, 44 insertions(+), 10 deletions(-) diff --git a/product/models/products/product.py b/product/models/products/product.py index 1669fc3..47abe95 100644 --- a/product/models/products/product.py +++ b/product/models/products/product.py @@ -1,3 +1,4 @@ +from datetime import datetime from typing import Annotated, Literal from pydantic import BaseModel, Field, PositiveInt @@ -9,16 +10,14 @@ str, Field(min_length=36, max_length=36), AfterValidator(validate_product_id) ] -ProductName = Annotated[str, Field(min_length=1, max_length=30)] - class Product(BaseModel): - name: ProductName + name: Annotated[str, Field(min_length=1, max_length=30)] id: ProductId price: PositiveInt class ProductNotification(BaseModel): product_id: ProductId - product_name: ProductName change_status: Literal['ADDED', 'REMOVED', 'UPDATED'] + created_at: datetime = Field(default_factory=datetime.utcnow) diff --git a/product/stream_processor/domain_logic/product_notification.py b/product/stream_processor/domain_logic/product_notification.py index 840d9d9..b441899 100644 --- a/product/stream_processor/domain_logic/product_notification.py +++ b/product/stream_processor/domain_logic/product_notification.py @@ -7,6 +7,6 @@ def notify_product_updates( - update: list[ProductNotification], event_handler: EventHandler + update: list[ProductNotification], event_handler: EventHandler | None = None ): - ... + return update diff --git a/product/stream_processor/handlers/stream_handler.py b/product/stream_processor/handlers/stream_handler.py index 1714402..78c2ad6 100644 --- a/product/stream_processor/handlers/stream_handler.py +++ b/product/stream_processor/handlers/stream_handler.py @@ -1,9 +1,44 @@ -from typing import Any -from aws_lambda_powertools.utilities.typing import LambdaContext +from typing import Any, TypeVar +from product.stream_processor.domain_logic.product_notification import ( + notify_product_updates, +) +from product.models.products.product import ProductNotification from aws_lambda_powertools import Logger +from aws_lambda_powertools.utilities.typing import LambdaContext +from aws_lambda_powertools.utilities.data_classes.dynamo_db_stream_event import ( + DynamoDBStreamEvent, +) logger = Logger() +# Until DAL gets created +EventHandler = TypeVar('EventHandler') + @logger.inject_lambda_context(log_event=True) -def process_stream(event: dict[str, Any], context: LambdaContext): - return 'Hello from the stream!' 
+def process_stream( + event: dict[str, Any], + context: LambdaContext, + event_handler: EventHandler | None = None, +) -> list[ProductNotification]: + # Until we create our handler product stream change input + stream_records = DynamoDBStreamEvent(event) + + product_updates = [] + for record in stream_records.records: + product_id = record.dynamodb.keys.get('id', '') # type: ignore[union-attr] + + match record.event_name: + case record.event_name.INSERT: # type: ignore[union-attr] + product_updates.append( + ProductNotification(product_id=product_id, change_status='ADDED') + ) + case record.event_name.MODIFY: # type: ignore[union-attr] + product_updates.append( + ProductNotification(product_id=product_id, change_status='UPDATED') + ) + case record.event_name.REMOVE: # type: ignore[union-attr] + product_updates.append( + ProductNotification(product_id=product_id, change_status='REMOVED') + ) + + return notify_product_updates(update=product_updates, event_handler=event_handler) From a622cf2c850ad7caddbf37c8c6556370d3f22413 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Sat, 30 Sep 2023 20:30:31 +0200 Subject: [PATCH 13/58] chore: add placeholder unit test Signed-off-by: heitorlessa --- tests/unit/stream_processor/__init__.py | 0 tests/unit/stream_processor/data_builder.py | 53 +++++++++++++++++++ .../stream_processor/test_stream_handler.py | 16 ++++++ tests/utils.py | 5 ++ 4 files changed, 74 insertions(+) create mode 100644 tests/unit/stream_processor/__init__.py create mode 100644 tests/unit/stream_processor/data_builder.py create mode 100644 tests/unit/stream_processor/test_stream_handler.py diff --git a/tests/unit/stream_processor/__init__.py b/tests/unit/stream_processor/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/unit/stream_processor/data_builder.py b/tests/unit/stream_processor/data_builder.py new file mode 100644 index 0000000..8f07bd7 --- /dev/null +++ b/tests/unit/stream_processor/data_builder.py @@ -0,0 +1,53 @@ +"""This will be replaced with hypothesis later""" +import random +import time +from typing import Any + + +def generate_dynamodb_stream_events( + product_id: str = '8c18c85a-0f10-4b73-b54a-07ab0d381018', +) -> dict[str, Any]: + return { + 'Records': [ + { + 'eventID': 'af0065970f39f49c7d014079db1b86ce', + 'eventName': 'INSERT', + 'eventVersion': '1.1', + 'eventSource': 'aws:dynamodb', + 'awsRegion': 'eu-west-1', + 'dynamodb': { + 'ApproximateCreationDateTime': time.time(), + 'Keys': {'id': {'S': f'{product_id}'}}, + 'NewImage': { + 'price': {'N': '1'}, + 'name': {'S': 'test'}, + 'id': {'S': f'{product_id}'}, + }, + 'SequenceNumber': f'{random.randint(a=10**24, b=10**25 - 1)}', + 'SizeBytes': 91, + 'StreamViewType': 'NEW_AND_OLD_IMAGES', + }, + 'eventSourceARN': 'arn:aws:dynamodb:eu-west-1:123456789012:table/lessa-stream-processor-ProductCruddbproducts/stream/2023-09-29T09:00:01.491', + }, + { + 'eventID': '4ef9babf010f884033a2bd761105f392', + 'eventName': 'REMOVE', + 'eventVersion': '1.1', + 'eventSource': 'aws:dynamodb', + 'awsRegion': 'eu-west-1', + 'dynamodb': { + 'ApproximateCreationDateTime': time.time(), + 'Keys': {'id': {'S': f'{product_id}'}}, + 'OldImage': { + 'price': {'N': '1'}, + 'name': {'S': 'test'}, + 'id': {'S': f'{product_id}'}, + }, + 'SequenceNumber': f'{random.randint(a=10**24, b=10**25 - 1)}', + 'SizeBytes': 91, + 'StreamViewType': 'NEW_AND_OLD_IMAGES', + }, + 'eventSourceARN': 'arn:aws:dynamodb:eu-west-1:123456789012:table/lessa-stream-processor-ProductCruddbproducts/stream/2023-09-29T09:00:01.491', + }, + ] + } 
diff --git a/tests/unit/stream_processor/test_stream_handler.py b/tests/unit/stream_processor/test_stream_handler.py new file mode 100644 index 0000000..aefd196 --- /dev/null +++ b/tests/unit/stream_processor/test_stream_handler.py @@ -0,0 +1,16 @@ +from tests.unit.stream_processor.data_builder import generate_dynamodb_stream_events +from product.stream_processor.handlers.stream_handler import process_stream +from tests.utils import generate_context +import uuid + + +def test_process_stream_notify_product_updates(): + # GIVEN + product_id = f'{uuid.uuid4()}' + events = generate_dynamodb_stream_events(product_id=product_id) + + # WHEN + ret = process_stream(events, generate_context()) + + # THEN + assert all(product.product_id == product_id for product in ret) diff --git a/tests/utils.py b/tests/utils.py index 5fedb2e..1c00a18 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -16,6 +16,11 @@ def generate_random_string(length: int = 7): def generate_context() -> LambdaContext: context = LambdaContext() context._aws_request_id = '888888' + context._function_name = 'test' + context._memory_limit_in_mb = 128 + context._invoked_function_arn = ( + 'arn:aws:lambda:eu-west-1:123456789012:function:test' + ) return context From 5cb8b4956d4d7de9d2353deb85c1142b491079fc Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Sat, 30 Sep 2023 20:37:06 +0200 Subject: [PATCH 14/58] chore: rename stream processor to align w/ crud Signed-off-by: heitorlessa --- infrastructure/product/stream_processor_construct.py | 2 +- .../handlers/{stream_handler.py => product_stream.py} | 0 .../{test_stream_handler.py => test_process_stream_handler.py} | 2 +- 3 files changed, 2 insertions(+), 2 deletions(-) rename product/stream_processor/handlers/{stream_handler.py => product_stream.py} (100%) rename tests/unit/stream_processor/{test_stream_handler.py => test_process_stream_handler.py} (87%) diff --git a/infrastructure/product/stream_processor_construct.py b/infrastructure/product/stream_processor_construct.py index 519f600..f9d1d1b 100644 --- a/infrastructure/product/stream_processor_construct.py +++ b/infrastructure/product/stream_processor_construct.py @@ -54,7 +54,7 @@ def _build_stream_processor_lambda(self, role: iam.Role, lambda_layer: PythonLay id=constants.STREAM_PROCESSOR_LAMBDA, runtime=_lambda.Runtime.PYTHON_3_11, code=_lambda.Code.from_asset(constants.BUILD_FOLDER), - handler='product.stream_processor.handlers.stream_handler.process_stream', + handler='product.stream_processor.handlers.product_stream.process_stream', environment={ constants.POWERTOOLS_SERVICE_NAME: constants.SERVICE_NAME, # for logger, tracer and metrics constants.POWER_TOOLS_LOG_LEVEL: 'DEBUG', # for logger diff --git a/product/stream_processor/handlers/stream_handler.py b/product/stream_processor/handlers/product_stream.py similarity index 100% rename from product/stream_processor/handlers/stream_handler.py rename to product/stream_processor/handlers/product_stream.py diff --git a/tests/unit/stream_processor/test_stream_handler.py b/tests/unit/stream_processor/test_process_stream_handler.py similarity index 87% rename from tests/unit/stream_processor/test_stream_handler.py rename to tests/unit/stream_processor/test_process_stream_handler.py index aefd196..5d575bb 100644 --- a/tests/unit/stream_processor/test_stream_handler.py +++ b/tests/unit/stream_processor/test_process_stream_handler.py @@ -1,5 +1,5 @@ from tests.unit.stream_processor.data_builder import generate_dynamodb_stream_events -from product.stream_processor.handlers.stream_handler 
import process_stream +from product.stream_processor.handlers.product_stream import process_stream from tests.utils import generate_context import uuid From 68788d018184a6b31726e30cb8a3de2c4078f857 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Sat, 30 Sep 2023 20:51:37 +0200 Subject: [PATCH 15/58] chore: add format-fix, fix yapf errors Signed-off-by: heitorlessa --- Makefile | 4 + infrastructure/product/constants.py | 4 +- product/models/products/product.py | 4 +- .../domain_logic/product_notification.py | 4 +- .../handlers/product_stream.py | 24 ++---- tests/unit/stream_processor/data_builder.py | 76 +++++++++++++------ .../test_process_stream_handler.py | 5 +- tests/utils.py | 4 +- 8 files changed, 73 insertions(+), 52 deletions(-) diff --git a/Makefile b/Makefile index b4715c2..e9c14ab 100644 --- a/Makefile +++ b/Makefile @@ -14,6 +14,10 @@ format: poetry run isort . poetry run yapf -d -vv --style=./.style -r . +format-fix: + poetry run isort . + poetry run yapf -vv --style=./.style -r --in-place . + lint: format @echo "Running flake8" poetry run flake8 product/* infrastructure/* tests/* docs/examples/* diff --git a/infrastructure/product/constants.py b/infrastructure/product/constants.py index ca2c326..bc1a8bf 100644 --- a/infrastructure/product/constants.py +++ b/infrastructure/product/constants.py @@ -32,6 +32,6 @@ EVENT_BUS_NAME = 'events' STREAM_PROCESSOR_CONSTRUCT_NAME = 'StreamProc' STREAM_PROCESSOR_LAMBDA = 'StreamProcessor' -STREAM_PROCESSOR_LAMBDA_MEMORY_SIZE = 128 # MB -STREAM_PROCESSOR_LAMBDA_TIMEOUT = 120 # seconds +STREAM_PROCESSOR_LAMBDA_MEMORY_SIZE = 128 # MB +STREAM_PROCESSOR_LAMBDA_TIMEOUT = 120 # seconds STREAM_PROCESSOR_LAMBDA_SERVICE_ROLE_ARN = 'StreamRoleArn' diff --git a/product/models/products/product.py b/product/models/products/product.py index 47abe95..a38abb6 100644 --- a/product/models/products/product.py +++ b/product/models/products/product.py @@ -6,9 +6,7 @@ from product.models.products.validators import validate_product_id -ProductId = Annotated[ - str, Field(min_length=36, max_length=36), AfterValidator(validate_product_id) -] +ProductId = Annotated[str, Field(min_length=36, max_length=36), AfterValidator(validate_product_id)] class Product(BaseModel): diff --git a/product/stream_processor/domain_logic/product_notification.py b/product/stream_processor/domain_logic/product_notification.py index b441899..8d22aca 100644 --- a/product/stream_processor/domain_logic/product_notification.py +++ b/product/stream_processor/domain_logic/product_notification.py @@ -6,7 +6,5 @@ EventHandler = TypeVar('EventHandler') -def notify_product_updates( - update: list[ProductNotification], event_handler: EventHandler | None = None -): +def notify_product_updates(update: list[ProductNotification], event_handler: EventHandler | None = None): return update diff --git a/product/stream_processor/handlers/product_stream.py b/product/stream_processor/handlers/product_stream.py index 78c2ad6..1d1877e 100644 --- a/product/stream_processor/handlers/product_stream.py +++ b/product/stream_processor/handlers/product_stream.py @@ -1,13 +1,11 @@ from typing import Any, TypeVar -from product.stream_processor.domain_logic.product_notification import ( - notify_product_updates, -) -from product.models.products.product import ProductNotification + from aws_lambda_powertools import Logger +from aws_lambda_powertools.utilities.data_classes.dynamo_db_stream_event import DynamoDBStreamEvent from aws_lambda_powertools.utilities.typing import LambdaContext -from 
aws_lambda_powertools.utilities.data_classes.dynamo_db_stream_event import ( - DynamoDBStreamEvent, -) + +from product.models.products.product import ProductNotification +from product.stream_processor.domain_logic.product_notification import notify_product_updates logger = Logger() # Until DAL gets created @@ -29,16 +27,10 @@ def process_stream( match record.event_name: case record.event_name.INSERT: # type: ignore[union-attr] - product_updates.append( - ProductNotification(product_id=product_id, change_status='ADDED') - ) + product_updates.append(ProductNotification(product_id=product_id, change_status='ADDED')) case record.event_name.MODIFY: # type: ignore[union-attr] - product_updates.append( - ProductNotification(product_id=product_id, change_status='UPDATED') - ) + product_updates.append(ProductNotification(product_id=product_id, change_status='UPDATED')) case record.event_name.REMOVE: # type: ignore[union-attr] - product_updates.append( - ProductNotification(product_id=product_id, change_status='REMOVED') - ) + product_updates.append(ProductNotification(product_id=product_id, change_status='REMOVED')) return notify_product_updates(update=product_updates, event_handler=event_handler) diff --git a/tests/unit/stream_processor/data_builder.py b/tests/unit/stream_processor/data_builder.py index 8f07bd7..b48ce3e 100644 --- a/tests/unit/stream_processor/data_builder.py +++ b/tests/unit/stream_processor/data_builder.py @@ -4,50 +4,80 @@ from typing import Any -def generate_dynamodb_stream_events( - product_id: str = '8c18c85a-0f10-4b73-b54a-07ab0d381018', -) -> dict[str, Any]: +def generate_dynamodb_stream_events(product_id: str = '8c18c85a-0f10-4b73-b54a-07ab0d381018',) -> dict[str, Any]: return { 'Records': [ { - 'eventID': 'af0065970f39f49c7d014079db1b86ce', - 'eventName': 'INSERT', - 'eventVersion': '1.1', - 'eventSource': 'aws:dynamodb', - 'awsRegion': 'eu-west-1', + 'eventID': + 'af0065970f39f49c7d014079db1b86ce', + 'eventName': + 'INSERT', + 'eventVersion': + '1.1', + 'eventSource': + 'aws:dynamodb', + 'awsRegion': + 'eu-west-1', 'dynamodb': { 'ApproximateCreationDateTime': time.time(), - 'Keys': {'id': {'S': f'{product_id}'}}, + 'Keys': { + 'id': { + 'S': f'{product_id}' + } + }, 'NewImage': { - 'price': {'N': '1'}, - 'name': {'S': 'test'}, - 'id': {'S': f'{product_id}'}, + 'price': { + 'N': '1' + }, + 'name': { + 'S': 'test' + }, + 'id': { + 'S': f'{product_id}' + }, }, 'SequenceNumber': f'{random.randint(a=10**24, b=10**25 - 1)}', 'SizeBytes': 91, 'StreamViewType': 'NEW_AND_OLD_IMAGES', }, - 'eventSourceARN': 'arn:aws:dynamodb:eu-west-1:123456789012:table/lessa-stream-processor-ProductCruddbproducts/stream/2023-09-29T09:00:01.491', + 'eventSourceARN': + 'arn:aws:dynamodb:eu-west-1:123456789012:table/lessa-stream-processor-ProductCruddbproducts/stream/2023-09-29T09:00:01.491', }, { - 'eventID': '4ef9babf010f884033a2bd761105f392', - 'eventName': 'REMOVE', - 'eventVersion': '1.1', - 'eventSource': 'aws:dynamodb', - 'awsRegion': 'eu-west-1', + 'eventID': + '4ef9babf010f884033a2bd761105f392', + 'eventName': + 'REMOVE', + 'eventVersion': + '1.1', + 'eventSource': + 'aws:dynamodb', + 'awsRegion': + 'eu-west-1', 'dynamodb': { 'ApproximateCreationDateTime': time.time(), - 'Keys': {'id': {'S': f'{product_id}'}}, + 'Keys': { + 'id': { + 'S': f'{product_id}' + } + }, 'OldImage': { - 'price': {'N': '1'}, - 'name': {'S': 'test'}, - 'id': {'S': f'{product_id}'}, + 'price': { + 'N': '1' + }, + 'name': { + 'S': 'test' + }, + 'id': { + 'S': f'{product_id}' + }, }, 'SequenceNumber': 
f'{random.randint(a=10**24, b=10**25 - 1)}', 'SizeBytes': 91, 'StreamViewType': 'NEW_AND_OLD_IMAGES', }, - 'eventSourceARN': 'arn:aws:dynamodb:eu-west-1:123456789012:table/lessa-stream-processor-ProductCruddbproducts/stream/2023-09-29T09:00:01.491', + 'eventSourceARN': + 'arn:aws:dynamodb:eu-west-1:123456789012:table/lessa-stream-processor-ProductCruddbproducts/stream/2023-09-29T09:00:01.491', }, ] } diff --git a/tests/unit/stream_processor/test_process_stream_handler.py b/tests/unit/stream_processor/test_process_stream_handler.py index 5d575bb..4987476 100644 --- a/tests/unit/stream_processor/test_process_stream_handler.py +++ b/tests/unit/stream_processor/test_process_stream_handler.py @@ -1,7 +1,8 @@ -from tests.unit.stream_processor.data_builder import generate_dynamodb_stream_events +import uuid + from product.stream_processor.handlers.product_stream import process_stream +from tests.unit.stream_processor.data_builder import generate_dynamodb_stream_events from tests.utils import generate_context -import uuid def test_process_stream_notify_product_updates(): diff --git a/tests/utils.py b/tests/utils.py index 1c00a18..9560b20 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -18,9 +18,7 @@ def generate_context() -> LambdaContext: context._aws_request_id = '888888' context._function_name = 'test' context._memory_limit_in_mb = 128 - context._invoked_function_arn = ( - 'arn:aws:lambda:eu-west-1:123456789012:function:test' - ) + context._invoked_function_arn = ('arn:aws:lambda:eu-west-1:123456789012:function:test') return context From ba92b7f6b1866b6c160a0f05081bc1d71b5f82c8 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Sat, 30 Sep 2023 21:03:16 +0200 Subject: [PATCH 16/58] fix(tests): stack not found when running integ Signed-off-by: heitorlessa --- tests/integration/conftest.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py index 49895bc..d1222e9 100644 --- a/tests/integration/conftest.py +++ b/tests/integration/conftest.py @@ -18,7 +18,7 @@ def init(): os.environ[POWER_TOOLS_LOG_LEVEL] = 'DEBUG' os.environ['REST_API'] = 'https://www.ranthebuilder.cloud/api' os.environ['ROLE_ARN'] = 'arn:partition:service:region:account-id:resource-type:resource-id' - os.environ['AWS_DEFAULT_REGION'] = 'us-east-1' # used for appconfig mocked boto calls + os.environ['AWS_DEFAULT_REGION'] = os.environ.get('AWS_DEFAULT_REGION', 'us-east-1') # used for appconfig mocked boto calls os.environ['TABLE_NAME'] = get_stack_output(TABLE_NAME_OUTPUT) os.environ['IDEMPOTENCY_TABLE_NAME'] = get_stack_output(IDEMPOTENCY_TABLE_NAME_OUTPUT) From e7b59b7836668b4104e1a42557d0ee123db6e390 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Sat, 30 Sep 2023 21:06:15 +0200 Subject: [PATCH 17/58] chore: align handler and fn handler name w/ crud Signed-off-by: heitorlessa --- infrastructure/product/stream_processor_construct.py | 2 +- .../handlers/{product_stream.py => process_stream.py} | 0 tests/unit/stream_processor/test_process_stream_handler.py | 2 +- 3 files changed, 2 insertions(+), 2 deletions(-) rename product/stream_processor/handlers/{product_stream.py => process_stream.py} (100%) diff --git a/infrastructure/product/stream_processor_construct.py b/infrastructure/product/stream_processor_construct.py index f9d1d1b..452bbd3 100644 --- a/infrastructure/product/stream_processor_construct.py +++ b/infrastructure/product/stream_processor_construct.py @@ -54,7 +54,7 @@ def _build_stream_processor_lambda(self, role: iam.Role, lambda_layer: 
PythonLay id=constants.STREAM_PROCESSOR_LAMBDA, runtime=_lambda.Runtime.PYTHON_3_11, code=_lambda.Code.from_asset(constants.BUILD_FOLDER), - handler='product.stream_processor.handlers.product_stream.process_stream', + handler='product.stream_processor.handlers.process_stream.process_stream', environment={ constants.POWERTOOLS_SERVICE_NAME: constants.SERVICE_NAME, # for logger, tracer and metrics constants.POWER_TOOLS_LOG_LEVEL: 'DEBUG', # for logger diff --git a/product/stream_processor/handlers/product_stream.py b/product/stream_processor/handlers/process_stream.py similarity index 100% rename from product/stream_processor/handlers/product_stream.py rename to product/stream_processor/handlers/process_stream.py diff --git a/tests/unit/stream_processor/test_process_stream_handler.py b/tests/unit/stream_processor/test_process_stream_handler.py index 4987476..fe32114 100644 --- a/tests/unit/stream_processor/test_process_stream_handler.py +++ b/tests/unit/stream_processor/test_process_stream_handler.py @@ -1,6 +1,6 @@ import uuid -from product.stream_processor.handlers.product_stream import process_stream +from product.stream_processor.handlers.process_stream import process_stream from tests.unit.stream_processor.data_builder import generate_dynamodb_stream_events from tests.utils import generate_context From 11ab3298f0bcc34f727002248453724ad6c0166a Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Sun, 1 Oct 2023 18:02:29 +0200 Subject: [PATCH 18/58] chore: add mypy boto3 events dev dep Signed-off-by: heitorlessa --- poetry.lock | 18 ++++++++++++++++-- pyproject.toml | 3 +++ 2 files changed, 19 insertions(+), 2 deletions(-) diff --git a/poetry.lock b/poetry.lock index 75d843c..07a90eb 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. 
[[package]] name = "annotated-types" @@ -1009,6 +1009,20 @@ files = [ [package.dependencies] typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""} +[[package]] +name = "mypy-boto3-events" +version = "1.28.46" +description = "Type annotations for boto3.EventBridge 1.28.46 service generated with mypy-boto3-builder 7.19.0" +optional = false +python-versions = ">=3.7" +files = [ + {file = "mypy-boto3-events-1.28.46.tar.gz", hash = "sha256:fdae2b51c7c13d0045c6a0a7c2ddb735e67ae79077084a28f922870593091ad1"}, + {file = "mypy_boto3_events-1.28.46-py3-none-any.whl", hash = "sha256:1220289549bd3b24a37561c5dbf6f20a9d97f66748b15dbfe9e218cc52cce246"}, +] + +[package.dependencies] +typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""} + [[package]] name = "mypy-extensions" version = "1.0.0" @@ -1983,4 +1997,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = "^3.11.0" -content-hash = "3bbff053d4521f4e31c16a5dbb26c9b3872feeec0f2000c24d58b82ad7de025f" +content-hash = "df5d8f387315a036546dc403ac7cac7cbd4d845336231390548c3ed488c039bc" diff --git a/pyproject.toml b/pyproject.toml index 3098524..88aa45c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -55,6 +55,9 @@ types-requests = "*" toml = "*" +[tool.poetry.group.dev.dependencies] +mypy-boto3-events = "^1.28.46" + [tool.isort] py_version = 311 multi_line_output = 3 From 6b85eb7a28f85784fe20899ff8c261d55f47108a Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Sun, 1 Oct 2023 18:11:39 +0200 Subject: [PATCH 19/58] feat: add initial DAL protocol and eventbridge Signed-off-by: heitorlessa --- .../stream_processor/dal/events/__init__.py | 0 product/stream_processor/dal/events/base.py | 22 +++++++++ .../dal/events/event_handler.py | 14 ++++++ .../dal/events/providers/__init__.py | 0 .../dal/events/providers/eventbridge.py | 36 ++++++++++++++ .../domain_logic/product_notification.py | 14 ++++-- .../handlers/process_stream.py | 7 ++- .../test_process_stream_handler.py | 47 +++++++++++++++---- 8 files changed, 123 insertions(+), 17 deletions(-) create mode 100644 product/stream_processor/dal/events/__init__.py create mode 100644 product/stream_processor/dal/events/base.py create mode 100644 product/stream_processor/dal/events/event_handler.py create mode 100644 product/stream_processor/dal/events/providers/__init__.py create mode 100644 product/stream_processor/dal/events/providers/eventbridge.py diff --git a/product/stream_processor/dal/events/__init__.py b/product/stream_processor/dal/events/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/product/stream_processor/dal/events/base.py b/product/stream_processor/dal/events/base.py new file mode 100644 index 0000000..d849e2a --- /dev/null +++ b/product/stream_processor/dal/events/base.py @@ -0,0 +1,22 @@ +from abc import ABC, abstractmethod +from typing import Any, Generic, TypeVar + +T = TypeVar('T') +R = TypeVar('R') + + +class EventProvider(ABC, Generic[T, R]): + + @abstractmethod + def send(self, payload: T) -> R: + ... + + +class EventHandler(ABC, Generic[T, R]): + + def __init__(self, emitter: EventProvider) -> None: + ... + + @abstractmethod + def emit(self, payload: list[T], metadata: dict[str, Any] | None = None) -> R: + ... 
diff --git a/product/stream_processor/dal/events/event_handler.py b/product/stream_processor/dal/events/event_handler.py new file mode 100644 index 0000000..67b77de --- /dev/null +++ b/product/stream_processor/dal/events/event_handler.py @@ -0,0 +1,14 @@ +from typing import Any + +from product.models.products.product import ProductNotification +from product.stream_processor.dal.events.base import EventHandler, EventProvider + + +class ProductNotificationHandler(EventHandler): + + def __init__(self, provider: EventProvider) -> None: + self.provider = provider + + def emit(self, payload: list[ProductNotification], metadata: dict[str, Any] | None = None) -> dict[str, Any]: + metadata = metadata or {} + return self.provider.send(payload) diff --git a/product/stream_processor/dal/events/providers/__init__.py b/product/stream_processor/dal/events/providers/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/product/stream_processor/dal/events/providers/eventbridge.py b/product/stream_processor/dal/events/providers/eventbridge.py new file mode 100644 index 0000000..8c4dc47 --- /dev/null +++ b/product/stream_processor/dal/events/providers/eventbridge.py @@ -0,0 +1,36 @@ +import json +from typing import TYPE_CHECKING, Any, Optional + +import boto3 + +from product.stream_processor.dal.events.base import EventProvider + +if TYPE_CHECKING: + from mypy_boto3_events import EventBridgeClient + from mypy_boto3_events.type_defs import PutEventsRequestEntryTypeDef + + +class EventBridge(EventProvider): + + def __init__(self, bus_name: str, client: Optional['EventBridgeClient'] = None): + self.bus_name = bus_name + self.client = client or boto3.client('events') + + # NOTE: missing input model that always expects a standard event like data + metadata + def send(self, payload: list[dict[str, Any]]): + event: 'PutEventsRequestEntryTypeDef' = { + 'Source': 'myorg.myservice', + 'DetailType': 'event_type.version', + 'Detail': json.dumps(payload), + 'EventBusName': self.bus_name, + 'TraceHeader': '', + } + + result = self.client.put_events(Entries=[event]) + + # Temporary until we create a model for our DAL (EventReceipt) + return { + 'success': result['FailedEntryCount'] == 0, + 'failed_entries': result['Entries'], + 'request_id': result['ResponseMetadata']['RequestId'] + } diff --git a/product/stream_processor/domain_logic/product_notification.py b/product/stream_processor/domain_logic/product_notification.py index 8d22aca..c767c70 100644 --- a/product/stream_processor/domain_logic/product_notification.py +++ b/product/stream_processor/domain_logic/product_notification.py @@ -1,10 +1,14 @@ -from typing import TypeVar +import os from product.models.products.product import ProductNotification +from product.stream_processor.dal.events.event_handler import ProductNotificationHandler +from product.stream_processor.dal.events.providers.eventbridge import EventBridge -# Until DAL gets created -EventHandler = TypeVar('EventHandler') +EVENT_BUS = os.environ.get('EVENT_BUS', '') -def notify_product_updates(update: list[ProductNotification], event_handler: EventHandler | None = None): - return update +def notify_product_updates(update: list[ProductNotification], event_handler: ProductNotificationHandler | None = None): + if event_handler is None: + event_handler = ProductNotificationHandler(provider=EventBridge(EVENT_BUS)) + + return event_handler.emit(payload=update) diff --git a/product/stream_processor/handlers/process_stream.py b/product/stream_processor/handlers/process_stream.py index 1d1877e..5fb3570
100644 --- a/product/stream_processor/handlers/process_stream.py +++ b/product/stream_processor/handlers/process_stream.py @@ -1,22 +1,21 @@ -from typing import Any, TypeVar +from typing import Any from aws_lambda_powertools import Logger from aws_lambda_powertools.utilities.data_classes.dynamo_db_stream_event import DynamoDBStreamEvent from aws_lambda_powertools.utilities.typing import LambdaContext from product.models.products.product import ProductNotification +from product.stream_processor.dal.events.event_handler import ProductNotificationHandler from product.stream_processor.domain_logic.product_notification import notify_product_updates logger = Logger() -# Until DAL gets created -EventHandler = TypeVar('EventHandler') @logger.inject_lambda_context(log_event=True) def process_stream( event: dict[str, Any], context: LambdaContext, - event_handler: EventHandler | None = None, + event_handler: ProductNotificationHandler | None = None, ) -> list[ProductNotification]: # Until we create our handler product stream change input stream_records = DynamoDBStreamEvent(event) diff --git a/tests/unit/stream_processor/test_process_stream_handler.py b/tests/unit/stream_processor/test_process_stream_handler.py index fe32114..8bef18d 100644 --- a/tests/unit/stream_processor/test_process_stream_handler.py +++ b/tests/unit/stream_processor/test_process_stream_handler.py @@ -1,17 +1,48 @@ -import uuid +from typing import Any +from product.models.products.product import ProductNotification +from product.stream_processor.dal.events.base import EventHandler from product.stream_processor.handlers.process_stream import process_stream from tests.unit.stream_processor.data_builder import generate_dynamodb_stream_events from tests.utils import generate_context +class FakeEventHandler(EventHandler): + + def __init__(self): + self.published_events = [] + + def emit(self, payload: list[ProductNotification], metadata: dict[str, Any] | None = None): + metadata = metadata or {} + for product in payload: + self.published_events.append({'event': product, 'metadata': metadata}) + + def __len__(self): + return len(self.published_events) + + def test_process_stream_notify_product_updates(): - # GIVEN - product_id = f'{uuid.uuid4()}' - events = generate_dynamodb_stream_events(product_id=product_id) + # GIVEN a DynamoDB stream event and a fake event handler + event = generate_dynamodb_stream_events() + event_store = FakeEventHandler() + + # WHEN process_stream is called with a custom event handler + process_stream(event=event, context=generate_context(), event_handler=event_store) + + # THEN the fake event handler should have received the correct number of events + # and no errors should have been raised (e.g., no sockets, no DAL calls) + assert len(event['Records']) == len(event_store) + + +# NOTE: this should fail once we have schema validation +def test_process_stream_with_empty_records(): + # GIVEN an empty DynamoDB stream event + event = {'Records': []} + event_store = FakeEventHandler() - # WHEN - ret = process_stream(events, generate_context()) + # WHEN process_stream is called with a custom event handler + process_stream(event=event, context=generate_context(), event_handler=event_store) - # THEN - assert all(product.product_id == product_id for product in ret) + # THEN the fake event handler should have received no events + # and no errors should have been raised + assert len(event_store) == 0 From 6c746bf89b3675fb91346bffae6c716ad926ca91 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Sun, 1 Oct 2023 20:28:36 
+0200 Subject: [PATCH 20/58] refactor: use status field over change_status since it's a notification already Signed-off-by: heitorlessa --- product/models/products/product.py | 2 +- product/stream_processor/handlers/process_stream.py | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/product/models/products/product.py b/product/models/products/product.py index a38abb6..092537b 100644 --- a/product/models/products/product.py +++ b/product/models/products/product.py @@ -17,5 +17,5 @@ class Product(BaseModel): class ProductNotification(BaseModel): product_id: ProductId - change_status: Literal['ADDED', 'REMOVED', 'UPDATED'] + status: Literal['ADDED', 'REMOVED', 'UPDATED'] created_at: datetime = Field(default_factory=datetime.utcnow) diff --git a/product/stream_processor/handlers/process_stream.py b/product/stream_processor/handlers/process_stream.py index 5fb3570..a8c4613 100644 --- a/product/stream_processor/handlers/process_stream.py +++ b/product/stream_processor/handlers/process_stream.py @@ -26,10 +26,10 @@ def process_stream( match record.event_name: case record.event_name.INSERT: # type: ignore[union-attr] - product_updates.append(ProductNotification(product_id=product_id, change_status='ADDED')) + product_updates.append(ProductNotification(product_id=product_id, status='ADDED')) case record.event_name.MODIFY: # type: ignore[union-attr] - product_updates.append(ProductNotification(product_id=product_id, change_status='UPDATED')) + product_updates.append(ProductNotification(product_id=product_id, status='UPDATED')) case record.event_name.REMOVE: # type: ignore[union-attr] - product_updates.append(ProductNotification(product_id=product_id, change_status='REMOVED')) + product_updates.append(ProductNotification(product_id=product_id, status='REMOVED')) return notify_product_updates(update=product_updates, event_handler=event_handler) From 689e09e40dfb95204c0fc1cca5ebb41b2111963e Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Sun, 1 Oct 2023 20:54:22 +0200 Subject: [PATCH 21/58] refactor: move test doubles and fixtures to conftest Signed-off-by: heitorlessa --- tests/unit/stream_processor/conftest.py | 34 +++++++++++++++++ .../test_process_stream_handler.py | 38 ++++--------------- 2 files changed, 42 insertions(+), 30 deletions(-) create mode 100644 tests/unit/stream_processor/conftest.py diff --git a/tests/unit/stream_processor/conftest.py b/tests/unit/stream_processor/conftest.py new file mode 100644 index 0000000..f0c55bf --- /dev/null +++ b/tests/unit/stream_processor/conftest.py @@ -0,0 +1,34 @@ +from typing import Any, Generator, TypeVar + +import pytest + +from product.models.products.product import ProductNotification +from product.stream_processor.dal.events.event_handler import ProductNotificationHandler +from tests.unit.stream_processor.data_builder import generate_dynamodb_stream_events, generate_product_notifications + +T = TypeVar('T') +Fixture = Generator[T, None, None] + + +class FakeEventHandler(ProductNotificationHandler): + + def __init__(self): + self.published_events = [] + + def emit(self, payload: list[ProductNotification], metadata: dict[str, Any] | None = None): + metadata = metadata or {} + for product in payload: + self.published_events.append({'event': product, 'metadata': metadata}) + + def __len__(self): + return len(self.published_events) + + +@pytest.fixture +def dynamodb_stream_events() -> Fixture[dict[str, Any]]: + yield generate_dynamodb_stream_events() + + +@pytest.fixture +def event_store() -> Fixture[FakeEventHandler]: + yield 
FakeEventHandler() diff --git a/tests/unit/stream_processor/test_process_stream_handler.py b/tests/unit/stream_processor/test_process_stream_handler.py index 8bef18d..8805ab2 100644 --- a/tests/unit/stream_processor/test_process_stream_handler.py +++ b/tests/unit/stream_processor/test_process_stream_handler.py @@ -1,48 +1,26 @@ -from typing import Any - -from product.models.products.product import ProductNotification -from product.stream_processor.dal.events.base import EventHandler from product.stream_processor.handlers.process_stream import process_stream -from tests.unit.stream_processor.data_builder import generate_dynamodb_stream_events +from tests.unit.stream_processor.conftest import FakeEventHandler from tests.utils import generate_context -class FakeEventHandler(EventHandler): - - def __init__(self): - self.published_events = [] - - def emit(self, payload: list[ProductNotification], metadata: dict[str, Any] | None = None): - metadata = metadata or {} - for product in payload: - self.published_events.append({'event': product, 'metadata': metadata}) - - def __len__(self): - return len(self.published_events) - - -def test_process_stream_notify_product_updates(): +def test_process_stream_notify_product_updates(dynamodb_stream_events: dict, event_store: FakeEventHandler): # GIVEN a DynamoDB stream event and a fake event handler - event = generate_dynamodb_stream_events() - event_store = FakeEventHandler() - # WHEN process_stream is called with a custom event handler - process_stream(event=event, context=generate_context(), event_handler=event_store) + process_stream(event=dynamodb_stream_events, context=generate_context(), event_handler=event_store) - # THEN the fake event handler should have received the correct number of events + # THEN the fake event handler should emit these product notifications # and no errors should have been raised (e.g., no sockets, no DAL calls) - assert len(event['Records']) == len(event_store) + assert len(dynamodb_stream_events['Records']) == len(event_store) # NOTE: this should fail once we have schema validation -def test_process_stream_with_empty_records(): +def test_process_stream_with_empty_records(event_store: FakeEventHandler): # GIVEN an empty DynamoDB stream event - event = {'Records': []} - event_store = FakeEventHandler() + event: dict[str, list] = {'Records': []} # WHEN process_stream is called with a custom event handler process_stream(event=event, context=generate_context(), event_handler=event_store) - # THEN the fake event handler should have received no events + # THEN the fake event handler should emit these product notifications # and no errors should have been raised assert len(event_store) == 0 From 013b608884f4b8cd35790c4a7132e227258a7da9 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Sun, 1 Oct 2023 20:56:29 +0200 Subject: [PATCH 22/58] chore: test product_notifications Signed-off-by: heitorlessa --- tests/unit/stream_processor/conftest.py | 21 +++++++++++++++++-- tests/unit/stream_processor/data_builder.py | 11 ++++++++++ .../test_product_notification.py | 13 ++++++++++++ 3 files changed, 43 insertions(+), 2 deletions(-) create mode 100644 tests/unit/stream_processor/test_product_notification.py diff --git a/tests/unit/stream_processor/conftest.py b/tests/unit/stream_processor/conftest.py index f0c55bf..62b5c55 100644 --- a/tests/unit/stream_processor/conftest.py +++ b/tests/unit/stream_processor/conftest.py @@ -1,4 +1,4 @@ -from typing import Any, Generator, TypeVar +from typing import Any, Generator, TypedDict, TypeVar import 
pytest @@ -10,19 +10,31 @@ Fixture = Generator[T, None, None] +class FakePublishedEvent(TypedDict): + event: ProductNotification + metadata: dict[str, Any] + + class FakeEventHandler(ProductNotificationHandler): def __init__(self): - self.published_events = [] + self.published_events: list[FakePublishedEvent] = [] def emit(self, payload: list[ProductNotification], metadata: dict[str, Any] | None = None): metadata = metadata or {} for product in payload: self.published_events.append({'event': product, 'metadata': metadata}) + @property + def published_notifications(self) -> list[ProductNotification]: + return [notification['event'] for notification in self.published_events] + def __len__(self): return len(self.published_events) + def __contains__(self, item: ProductNotification): + return item in self.published_notifications + @pytest.fixture def dynamodb_stream_events() -> Fixture[dict[str, Any]]: @@ -32,3 +44,8 @@ def dynamodb_stream_events() -> Fixture[dict[str, Any]]: @pytest.fixture def event_store() -> Fixture[FakeEventHandler]: yield FakeEventHandler() + + +@pytest.fixture +def product_notifications() -> Fixture[list[ProductNotification]]: + yield generate_product_notifications() diff --git a/tests/unit/stream_processor/data_builder.py b/tests/unit/stream_processor/data_builder.py index b48ce3e..e73a355 100644 --- a/tests/unit/stream_processor/data_builder.py +++ b/tests/unit/stream_processor/data_builder.py @@ -2,6 +2,9 @@ import random import time from typing import Any +from uuid import uuid4 + +from product.models.products.product import ProductNotification def generate_dynamodb_stream_events(product_id: str = '8c18c85a-0f10-4b73-b54a-07ab0d381018',) -> dict[str, Any]: @@ -81,3 +84,11 @@ def generate_dynamodb_stream_events(product_id: str = '8c18c85a-0f10-4b73-b54a-0 }, ] } + + +def generate_product_notifications(product_id: str = '') -> list[ProductNotification]: + product_id = product_id or f'{uuid4()}' + return [ + ProductNotification(product_id=product_id, status=random.choice(['ADDED', 'REMOVED', 'UPDATED'])), + ProductNotification(product_id=product_id, status=random.choice(['ADDED', 'REMOVED', 'UPDATED'])), + ] diff --git a/tests/unit/stream_processor/test_product_notification.py b/tests/unit/stream_processor/test_product_notification.py new file mode 100644 index 0000000..fae3af0 --- /dev/null +++ b/tests/unit/stream_processor/test_product_notification.py @@ -0,0 +1,13 @@ +from product.models.products.product import ProductNotification +from product.stream_processor.domain_logic.product_notification import notify_product_updates +from tests.unit.stream_processor.conftest import FakeEventHandler + + +def test_product_notifications_are_emitted(product_notifications: list[ProductNotification], event_store: FakeEventHandler): + # GIVEN a list of Product Notifications and a fake event handler + # WHEN the product notifications are processed + notify_product_updates(update=product_notifications, event_handler=event_store) + + # THEN the fake event handler should emit these product notifications + assert len(event_store) == len(product_notifications) + assert all(notification in event_store for notification in product_notifications) From c34035240394d6166ee4ec96f7735ce191b9e6c2 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Mon, 2 Oct 2023 10:49:52 +0200 Subject: [PATCH 23/58] chore: add EventReceipt output model --- product/stream_processor/dal/events/base.py | 6 ++-- .../dal/events/event_handler.py | 3 +- .../stream_processor/dal/events/exceptions.py | 2 ++ .../{schemas => 
events/models}/__init__.py | 0 .../dal/events/models/output.py | 16 +++++++++ .../dal/events/providers/eventbridge.py | 34 ++++++++++++++----- 6 files changed, 50 insertions(+), 11 deletions(-) create mode 100644 product/stream_processor/dal/events/exceptions.py rename product/stream_processor/dal/{schemas => events/models}/__init__.py (100%) create mode 100644 product/stream_processor/dal/events/models/output.py diff --git a/product/stream_processor/dal/events/base.py b/product/stream_processor/dal/events/base.py index d849e2a..a354322 100644 --- a/product/stream_processor/dal/events/base.py +++ b/product/stream_processor/dal/events/base.py @@ -1,6 +1,8 @@ from abc import ABC, abstractmethod from typing import Any, Generic, TypeVar +from product.stream_processor.dal.events.models.output import EventReceipt + T = TypeVar('T') R = TypeVar('R') @@ -14,9 +16,9 @@ def send(self, payload: T) -> R: class EventHandler(ABC, Generic[T, R]): - def __init__(self, emitter: EventProvider) -> None: + def __init__(self, provider: EventProvider) -> None: ... @abstractmethod - def emit(self, payload: list[T], metadata: dict[str, Any] | None = None) -> R: + def emit(self, payload: list[T], metadata: dict[str, Any] | None = None) -> EventReceipt: ... diff --git a/product/stream_processor/dal/events/event_handler.py b/product/stream_processor/dal/events/event_handler.py index 67b77de..2485520 100644 --- a/product/stream_processor/dal/events/event_handler.py +++ b/product/stream_processor/dal/events/event_handler.py @@ -2,6 +2,7 @@ from product.models.products.product import ProductNotification from product.stream_processor.dal.events.base import EventHandler, EventProvider +from product.stream_processor.dal.events.models.output import EventReceipt class ProductNotificationHandler(EventHandler): @@ -9,6 +10,6 @@ class ProductNotificationHandler(EventHandler): def __init__(self, provider: EventProvider) -> None: self.provider = provider - def emit(self, payload: list[ProductNotification], metadata: dict[str, Any] | None = None) -> dict[str, Any]: + def emit(self, payload: list[ProductNotification], metadata: dict[str, Any] | None = None) -> EventReceipt: metadata = metadata or {} return self.provider.send(payload) diff --git a/product/stream_processor/dal/events/exceptions.py b/product/stream_processor/dal/events/exceptions.py new file mode 100644 index 0000000..89c7c15 --- /dev/null +++ b/product/stream_processor/dal/events/exceptions.py @@ -0,0 +1,2 @@ +class ProductNotificationDeliveryError(Exception): + pass diff --git a/product/stream_processor/dal/schemas/__init__.py b/product/stream_processor/dal/events/models/__init__.py similarity index 100% rename from product/stream_processor/dal/schemas/__init__.py rename to product/stream_processor/dal/events/models/__init__.py diff --git a/product/stream_processor/dal/events/models/output.py b/product/stream_processor/dal/events/models/output.py new file mode 100644 index 0000000..eed3a59 --- /dev/null +++ b/product/stream_processor/dal/events/models/output.py @@ -0,0 +1,16 @@ +from pydantic import BaseModel + + +class EventReceiptSuccessfulNotification(BaseModel): + receipt_id: str + + +class EventReceiptUnsuccessfulNotification(BaseModel): + receipt_id: str + error: str + details: dict + + +class EventReceipt(BaseModel): + successful_notifications: list[EventReceiptSuccessfulNotification] + unsuccessful_notifications: list[EventReceiptUnsuccessfulNotification] diff --git a/product/stream_processor/dal/events/providers/eventbridge.py 
b/product/stream_processor/dal/events/providers/eventbridge.py
index 8c4dc47..bb5b6b7 100644
--- a/product/stream_processor/dal/events/providers/eventbridge.py
+++ b/product/stream_processor/dal/events/providers/eventbridge.py
@@ -3,7 +3,9 @@
 
 import boto3
 
-from product.stream_processor.dal.events.base import EventProvider
+from product.stream_processor.dal.events.base import EventProvider, T
+from product.stream_processor.dal.events.exceptions import ProductNotificationDeliveryError
+from product.stream_processor.dal.events.models.output import EventReceipt, EventReceiptSuccessfulNotification, EventReceiptUnsuccessfulNotification
 
 if TYPE_CHECKING:
     from mypy_boto3_events import EventBridgeClient
@@ -17,7 +19,7 @@ def __init__(self, bus_name: str, client: Optional['EventBridgeClient'] = None):
         self.client = client or boto3.client('events')
 
     # NOTE: missing input model that always expect a standard event like data + metadata
-    def send(self, payload: list[dict[str, Any]]):
+    def send(self, payload: T) -> EventReceipt:
         event: 'PutEventsRequestEntryTypeDef' = {
             'Source': 'myorg.myservice',
             'DetailType': 'event_type.version',
@@ -28,9 +30,25 @@
 
         result = self.client.put_events(Entries=[event])
 
-        # Temporary until we create a model for our DAL (EventReceipt)
-        return {
-            'success': result['FailedEntryCount'] == 0,
-            'failed_entries': result['Entries'],
-            'request_id': result['ResponseMetadata']['RequestId']
-        }
+        successful_requests, unsuccessful_requests = self._collect_receipts(result)
+        has_failed_entries = result['FailedEntryCount'] > 0
+
+        if has_failed_entries:
+            # NOTE: Improve this error by correlating which entry failed to send.
+            # We will fail regardless, but it'll be useful for logging and correlation later on. 
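+            # (for context: put_events reports partial failures via FailedEntryCount and
+            # per-entry ErrorCode/ErrorMessage, so raising here keeps batch delivery all-or-nothing)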
+ raise ProductNotificationDeliveryError(f'Failed to deliver {len(unsuccessful_requests)} events') + + return EventReceipt(successful_notifications=successful_requests, unsuccessful_notifications=unsuccessful_requests) + + @staticmethod + def _collect_receipts(result) -> tuple[list[EventReceiptSuccessfulNotification], list[EventReceiptUnsuccessfulNotification]]: + successful_requests: list[EventReceiptSuccessfulNotification] = [] + unsuccessful_requests: list[EventReceiptUnsuccessfulNotification] = [] + for receipt in result['Entries']: + if receipt['ErrorMessage']: + unsuccessful_requests.append( + EventReceiptUnsuccessfulNotification(receipt_id=receipt['EventId'], error=receipt['ErrorMessage'], + details={'error_code': receipt['ErrorCode']})) + else: + successful_requests.append(EventReceiptSuccessfulNotification(receipt_id=receipt['EventId'])) + return successful_requests, unsuccessful_requests From 1dd1fb0e39faaa5da732c7b800ae10f97595dbbc Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Mon, 2 Oct 2023 10:50:12 +0200 Subject: [PATCH 24/58] chore: ignore .idea dir --- .gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitignore b/.gitignore index 5b8a414..66493f6 100644 --- a/.gitignore +++ b/.gitignore @@ -252,3 +252,4 @@ lambda_requirements.txt # Misc node_modules +.idea From c38a33c61d068b5acc634aae1884fcb27ddeb9f2 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Mon, 2 Oct 2023 12:31:41 +0200 Subject: [PATCH 25/58] chore: create and use Event model --- product/constants.py | 1 + product/models/products/product.py | 8 ++++- product/stream_processor/dal/events/base.py | 10 +++--- .../dal/events/event_handler.py | 20 +++++++++++- .../dal/events/models/input.py | 19 +++++++++++ .../dal/events/providers/eventbridge.py | 32 +++++++++++-------- 6 files changed, 70 insertions(+), 20 deletions(-) create mode 100644 product/constants.py create mode 100644 product/stream_processor/dal/events/models/input.py diff --git a/product/constants.py b/product/constants.py new file mode 100644 index 0000000..874f653 --- /dev/null +++ b/product/constants.py @@ -0,0 +1 @@ +XRAY_TRACE_ID_ENV: str = '_X_AMZN_TRACE_ID' diff --git a/product/models/products/product.py b/product/models/products/product.py index 092537b..0e6dee4 100644 --- a/product/models/products/product.py +++ b/product/models/products/product.py @@ -1,5 +1,5 @@ from datetime import datetime -from typing import Annotated, Literal +from typing import Annotated, Literal, ClassVar from pydantic import BaseModel, Field, PositiveInt from pydantic.functional_validators import AfterValidator @@ -19,3 +19,9 @@ class ProductNotification(BaseModel): product_id: ProductId status: Literal['ADDED', 'REMOVED', 'UPDATED'] created_at: datetime = Field(default_factory=datetime.utcnow) + + # NOTE: consider whether this is the best place. 
+ # at best, keeping it close to the model it's easier to detect schema or breaking changes + # these are not serialized when using dict(), model_dump(), or model_dump_json() + event_name: ClassVar[str] = 'PRODUCT_CHANGE_NOTIFICATION' + event_version: ClassVar[str] = 'v1' diff --git a/product/stream_processor/dal/events/base.py b/product/stream_processor/dal/events/base.py index a354322..a8700f8 100644 --- a/product/stream_processor/dal/events/base.py +++ b/product/stream_processor/dal/events/base.py @@ -1,20 +1,20 @@ from abc import ABC, abstractmethod -from typing import Any, Generic, TypeVar +from typing import Any, Generic, TypeVar, Sequence from product.stream_processor.dal.events.models.output import EventReceipt +from product.stream_processor.dal.events.models.input import Event T = TypeVar('T') -R = TypeVar('R') -class EventProvider(ABC, Generic[T, R]): +class EventProvider(ABC): @abstractmethod - def send(self, payload: T) -> R: + def send(self, payload: Sequence[Event]) -> EventReceipt: ... -class EventHandler(ABC, Generic[T, R]): +class EventHandler(ABC, Generic[T]): def __init__(self, provider: EventProvider) -> None: ... diff --git a/product/stream_processor/dal/events/event_handler.py b/product/stream_processor/dal/events/event_handler.py index 2485520..484ad8c 100644 --- a/product/stream_processor/dal/events/event_handler.py +++ b/product/stream_processor/dal/events/event_handler.py @@ -1,15 +1,33 @@ from typing import Any +from uuid import uuid4 from product.models.products.product import ProductNotification from product.stream_processor.dal.events.base import EventHandler, EventProvider from product.stream_processor.dal.events.models.output import EventReceipt +from product.stream_processor.dal.events.models.input import Event, EventMetadata class ProductNotificationHandler(EventHandler): + EVENT_SOURCE = 'myorg.product.product_notification' def __init__(self, provider: EventProvider) -> None: self.provider = provider def emit(self, payload: list[ProductNotification], metadata: dict[str, Any] | None = None) -> EventReceipt: metadata = metadata or {} - return self.provider.send(payload) + correlation_id = f'{uuid4()}' # we want the same correlation ID for the batch + + event_payload = [ + Event( + data=notification.to_dict(), + metadata=EventMetadata( + event_type=notification.event_name, + event_source=self.EVENT_SOURCE, + event_version=notification.event_version, + correlation_id=correlation_id, + ) + ) + for notification in payload + ] + + return self.provider.send(event_payload) diff --git a/product/stream_processor/dal/events/models/input.py b/product/stream_processor/dal/events/models/input.py new file mode 100644 index 0000000..d0d229f --- /dev/null +++ b/product/stream_processor/dal/events/models/input.py @@ -0,0 +1,19 @@ +from datetime import datetime + +from aws_lambda_powertools import Logger +from pydantic import BaseModel, Field + +logger = Logger() + + +class EventMetadata(BaseModel): + event_name: str + event_source: str + event_version: str + correlation_id: str + created_at: datetime = Field(default_factory=datetime.utcnow) + + +class Event(BaseModel): + data: BaseModel + metadata: EventMetadata diff --git a/product/stream_processor/dal/events/providers/eventbridge.py b/product/stream_processor/dal/events/providers/eventbridge.py index bb5b6b7..760e750 100644 --- a/product/stream_processor/dal/events/providers/eventbridge.py +++ b/product/stream_processor/dal/events/providers/eventbridge.py @@ -1,10 +1,12 @@ -import json -from typing import TYPE_CHECKING, 
Any, Optional +import os +from typing import TYPE_CHECKING, Optional import boto3 -from product.stream_processor.dal.events.base import EventProvider, T +from product.constants import XRAY_TRACE_ID_ENV +from product.stream_processor.dal.events.base import EventProvider from product.stream_processor.dal.events.exceptions import ProductNotificationDeliveryError +from product.stream_processor.dal.events.models.input import Event from product.stream_processor.dal.events.models.output import EventReceipt, EventReceiptSuccessfulNotification, EventReceiptUnsuccessfulNotification if TYPE_CHECKING: @@ -19,16 +21,20 @@ def __init__(self, bus_name: str, client: Optional['EventBridgeClient'] = None): self.client = client or boto3.client('events') # NOTE: missing input model that always expect a standard event like data + metadata - def send(self, payload: T) -> EventReceipt: - event: 'PutEventsRequestEntryTypeDef' = { - 'Source': 'myorg.myservice', - 'DetailType': 'event_type.version', - 'Detail': json.dumps(payload), - 'EventBusName': self.bus_name, - 'TraceHeader': '', - } - - result = self.client.put_events(Entries=[event]) + def send(self, payload: list[Event]) -> EventReceipt: + events: list['PutEventsRequestEntryTypeDef'] = [] + + # NOTE: 'Time' field is not included to be able to measure end-to-end latency later (time - created_at) + for event in payload: + events.append({ + 'Source': event.metadata.event_source, + 'DetailType': f'{event.metadata.event_name}.{event.metadata.event_version}', + 'Detail': event.model_dump_json(), + 'EventBusName': self.bus_name, + 'TraceHeader': os.environ.get(XRAY_TRACE_ID_ENV, ''), + }) + + result = self.client.put_events(Entries=events) successful_requests, unsuccessful_requests = self._collect_receipts(result) has_failed_entries = result['FailedEntryCount'] >= 0 From d2bd0b72dc73f00a3814be78cc561f5cb82636d6 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Mon, 2 Oct 2023 17:10:59 +0200 Subject: [PATCH 26/58] chore: use generic container for emit Signed-off-by: heitorlessa --- product/stream_processor/dal/events/base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/product/stream_processor/dal/events/base.py b/product/stream_processor/dal/events/base.py index a8700f8..3781e7a 100644 --- a/product/stream_processor/dal/events/base.py +++ b/product/stream_processor/dal/events/base.py @@ -20,5 +20,5 @@ def __init__(self, provider: EventProvider) -> None: ... @abstractmethod - def emit(self, payload: list[T], metadata: dict[str, Any] | None = None) -> EventReceipt: + def emit(self, payload: Sequence[T], metadata: dict[str, Any] | None = None) -> EventReceipt: ... 
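NOTE: an illustrative sketch, not part of this series — with the Sequence-based contract
above, any transport implementing EventProvider.send(payload: Sequence[Event]) -> EventReceipt
can be injected into an EventHandler. A minimal in-memory provider under those assumptions
(the InMemoryProvider name is invented for this example; the series adds a similar
FakeProvider to the unit-test conftest in patch 33):

    from typing import Sequence

    from product.stream_processor.dal.events.base import EventProvider
    from product.stream_processor.dal.events.models.input import Event
    from product.stream_processor.dal.events.models.output import EventReceipt, EventReceiptSuccessfulNotification


    class InMemoryProvider(EventProvider):
        """Records events in memory instead of sending them, e.g., for tests or local runs."""

        def __init__(self) -> None:
            self.sent: list[Event] = []

        def send(self, payload: Sequence[Event]) -> EventReceipt:
            # pretend every entry was delivered and hand back one receipt per event
            self.sent.extend(payload)
            receipts = [EventReceiptSuccessfulNotification(receipt_id=f'in-memory-{idx}') for idx, _ in enumerate(payload)]
            return EventReceipt(successful_notifications=receipts, unsuccessful_notifications=[])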
From fa546f0855a4be97b457bf5371a389e1740a47bf Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Mon, 2 Oct 2023 21:29:30 +0200 Subject: [PATCH 27/58] chore: fix event serialization; cleanup Signed-off-by: heitorlessa --- product/models/products/product.py | 3 +- product/stream_processor/dal/events/base.py | 4 +-- .../dal/events/event_handler.py | 18 ++++------ .../dal/events/models/input.py | 8 +++-- .../dal/events/providers/eventbridge.py | 34 +++++++++++-------- 5 files changed, 36 insertions(+), 31 deletions(-) diff --git a/product/models/products/product.py b/product/models/products/product.py index 0e6dee4..ae47e72 100644 --- a/product/models/products/product.py +++ b/product/models/products/product.py @@ -1,5 +1,5 @@ from datetime import datetime -from typing import Annotated, Literal, ClassVar +from typing import Annotated, ClassVar, Literal from pydantic import BaseModel, Field, PositiveInt from pydantic.functional_validators import AfterValidator @@ -25,3 +25,4 @@ class ProductNotification(BaseModel): # these are not serialized when using dict(), model_dump(), or model_dump_json() event_name: ClassVar[str] = 'PRODUCT_CHANGE_NOTIFICATION' event_version: ClassVar[str] = 'v1' + event_source: ClassVar[str] = 'myorg.product.product_notification' diff --git a/product/stream_processor/dal/events/base.py b/product/stream_processor/dal/events/base.py index 3781e7a..2ed42d8 100644 --- a/product/stream_processor/dal/events/base.py +++ b/product/stream_processor/dal/events/base.py @@ -1,8 +1,8 @@ from abc import ABC, abstractmethod -from typing import Any, Generic, TypeVar, Sequence +from typing import Any, Generic, Sequence, TypeVar -from product.stream_processor.dal.events.models.output import EventReceipt from product.stream_processor.dal.events.models.input import Event +from product.stream_processor.dal.events.models.output import EventReceipt T = TypeVar('T') diff --git a/product/stream_processor/dal/events/event_handler.py b/product/stream_processor/dal/events/event_handler.py index 484ad8c..3cf8708 100644 --- a/product/stream_processor/dal/events/event_handler.py +++ b/product/stream_processor/dal/events/event_handler.py @@ -3,31 +3,27 @@ from product.models.products.product import ProductNotification from product.stream_processor.dal.events.base import EventHandler, EventProvider -from product.stream_processor.dal.events.models.output import EventReceipt from product.stream_processor.dal.events.models.input import Event, EventMetadata +from product.stream_processor.dal.events.models.output import EventReceipt class ProductNotificationHandler(EventHandler): - EVENT_SOURCE = 'myorg.product.product_notification' def __init__(self, provider: EventProvider) -> None: self.provider = provider def emit(self, payload: list[ProductNotification], metadata: dict[str, Any] | None = None) -> EventReceipt: metadata = metadata or {} - correlation_id = f'{uuid4()}' # we want the same correlation ID for the batch + correlation_id = f'{uuid4()}' # we want the same correlation ID for the batch; use logger correlation ID later + # NOTE: this will be generic for all events later, we can easily make it reusable + # also consider a method to build event from payload event_payload = [ Event( data=notification.to_dict(), - metadata=EventMetadata( - event_type=notification.event_name, - event_source=self.EVENT_SOURCE, - event_version=notification.event_version, - correlation_id=correlation_id, - ) - ) + metadata=EventMetadata(event_type=notification.event_name, event_source=notification.event_source, + 
event_version=notification.event_version, correlation_id=correlation_id, **metadata))
             for notification in payload
         ]
 
-        return self.provider.send(event_payload)
+        return self.provider.send(payload=event_payload)
diff --git a/product/stream_processor/dal/events/models/input.py b/product/stream_processor/dal/events/models/input.py
index d0d229f..bf52c45 100644
--- a/product/stream_processor/dal/events/models/input.py
+++ b/product/stream_processor/dal/events/models/input.py
@@ -1,8 +1,10 @@
 from datetime import datetime
+from typing import TypeVar
 
 from aws_lambda_powertools import Logger
-from pydantic import BaseModel, Field
+from pydantic import BaseModel, ConfigDict, Field
 
+AnyModel = TypeVar('AnyModel', bound=BaseModel)
 logger = Logger()
 
 
@@ -13,7 +15,9 @@ class EventMetadata(BaseModel):
     correlation_id: str
     created_at: datetime = Field(default_factory=datetime.utcnow)
 
+    model_config = ConfigDict(extra='allow')
+
 
 class Event(BaseModel):
-    data: BaseModel
+    data: AnyModel
     metadata: EventMetadata
diff --git a/product/stream_processor/dal/events/providers/eventbridge.py b/product/stream_processor/dal/events/providers/eventbridge.py
index 760e750..cf1356a 100644
--- a/product/stream_processor/dal/events/providers/eventbridge.py
+++ b/product/stream_processor/dal/events/providers/eventbridge.py
@@ -11,7 +11,7 @@
 
 if TYPE_CHECKING:
     from mypy_boto3_events import EventBridgeClient
-    from mypy_boto3_events.type_defs import PutEventsRequestEntryTypeDef
+    from mypy_boto3_events.type_defs import PutEventsRequestEntryTypeDef, PutEventsResponseTypeDef
 
 
 class EventBridge(EventProvider):
@@ -20,11 +20,17 @@ def __init__(self, bus_name: str, client: Optional['EventBridgeClient'] = None):
         self.bus_name = bus_name
         self.client = client or boto3.client('events')
 
-    # NOTE: missing input model that always expect a standard event like data + metadata
     def send(self, payload: list[Event]) -> EventReceipt:
+        events = self.build_put_events_request(payload)
+        result = self.client.put_events(Entries=events)
+
+        successful_requests, unsuccessful_requests = self._collect_receipts(result)
+        return EventReceipt(successful_notifications=successful_requests, unsuccessful_notifications=unsuccessful_requests)
+
+    def build_put_events_request(self, payload: list[Event]) -> list['PutEventsRequestEntryTypeDef']:
         events: list['PutEventsRequestEntryTypeDef'] = []
 
-        # NOTE: 'Time' field is not included to be able to measure end-to-end latency later (time - created_at)
+        # 'Time' field is not included to be able to measure end-to-end latency later (time - created_at)
         for event in payload:
             events.append({
                 'Source': event.metadata.event_source,
@@ -34,22 +40,14 @@ def send(self, payload: list[Event]) -> EventReceipt:
                 'TraceHeader': os.environ.get(XRAY_TRACE_ID_ENV, ''),
             })
 
-        result = self.client.put_events(Entries=events)
-
-        successful_requests, unsuccessful_requests = self._collect_receipts(result)
-        has_failed_entries = result['FailedEntryCount'] > 0
-
-        if has_failed_entries:
-            # NOTE: Improve this error by correlating which entry failed to send.
-            # We will fail regardless, but it'll be useful for logging and correlation later on. 
-            raise ProductNotificationDeliveryError(f'Failed to deliver {len(unsuccessful_requests)} events')
-
-        return EventReceipt(successful_notifications=successful_requests, unsuccessful_notifications=unsuccessful_requests)
+        return events
 
     @staticmethod
-    def _collect_receipts(result) -> tuple[list[EventReceiptSuccessfulNotification], list[EventReceiptUnsuccessfulNotification]]:
+    def _collect_receipts(
+            result: 'PutEventsResponseTypeDef') -> tuple[list[EventReceiptSuccessfulNotification], list[EventReceiptUnsuccessfulNotification]]:
         successful_requests: list[EventReceiptSuccessfulNotification] = []
         unsuccessful_requests: list[EventReceiptUnsuccessfulNotification] = []
+
         for receipt in result['Entries']:
             if receipt['ErrorMessage']:
                 unsuccessful_requests.append(
                     EventReceiptUnsuccessfulNotification(receipt_id=receipt['EventId'], error=receipt['ErrorMessage'],
                                                          details={'error_code': receipt['ErrorCode']}))
             else:
                 successful_requests.append(EventReceiptSuccessfulNotification(receipt_id=receipt['EventId']))
+
+        # NOTE: Improve this error by correlating which entry failed to send.
+        # We will fail regardless, but it'll be useful for logging and correlation later on.
+        if result['FailedEntryCount'] > 0:
+            raise ProductNotificationDeliveryError(f'Failed to deliver {len(unsuccessful_requests)} events')
+
         return successful_requests, unsuccessful_requests
From 2723fbf402aadabd4194b2675b6c42fbad95f17a Mon Sep 17 00:00:00 2001
From: heitorlessa
Date: Mon, 2 Oct 2023 21:32:07 +0200
Subject: [PATCH 28/58] chore: future note for event slicing

Signed-off-by: heitorlessa
---
 product/stream_processor/dal/events/providers/eventbridge.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/product/stream_processor/dal/events/providers/eventbridge.py b/product/stream_processor/dal/events/providers/eventbridge.py
index cf1356a..64439f3 100644
--- a/product/stream_processor/dal/events/providers/eventbridge.py
+++ b/product/stream_processor/dal/events/providers/eventbridge.py
@@ -22,6 +22,8 @@ def __init__(self, bus_name: str, client: Optional['EventBridgeClient'] = None):
 
     def send(self, payload: list[Event]) -> EventReceipt:
         events = self.build_put_events_request(payload)
+
+        # NOTE: we need a generator that will slice up to 10 event entries
         result = self.client.put_events(Entries=events)
 
         successful_requests, unsuccessful_requests = self._collect_receipts(result)
From aa758447239a3116fdb16462ea93d9e30fa43e85 Mon Sep 17 00:00:00 2001
From: heitorlessa
Date: Tue, 3 Oct 2023 13:29:16 +0200
Subject: [PATCH 29/58] chore: disable sockets for unit test

Signed-off-by: heitorlessa
---
 poetry.lock                             | 16 +++++++++++++++-
 pyproject.toml                          |  1 +
 tests/unit/stream_processor/conftest.py |  7 +++++++
 3 files changed, 23 insertions(+), 1 deletion(-)

diff --git a/poetry.lock b/poetry.lock
index 07a90eb..c7a72d8 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1422,6 +1422,20 @@ pytest = ">=5.0"
 [package.extras]
 dev = ["pre-commit", "pytest-asyncio", "tox"]
 
+[[package]]
+name = "pytest-socket"
+version = "0.6.0"
+description = "Pytest Plugin to disable socket calls during tests"
+optional = false
+python-versions = ">=3.7,<4.0"
+files = [
+    {file = "pytest_socket-0.6.0-py3-none-any.whl", hash = "sha256:cca72f134ff01e0023c402e78d31b32e68da3efdf3493bf7788f8eba86a6824c"},
+    {file = "pytest_socket-0.6.0.tar.gz", hash = "sha256:363c1d67228315d4fc7912f1aabfd570de29d0e3db6217d61db5728adacd7138"},
+]
+
+[package.dependencies]
+pytest = ">=3.6.3"
+
 [[package]]
 name = "python-dateutil"
 version = "2.8.2"
@@ -1997,4 +2011,4 @@ testing = ["big-O", 
"jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = "^3.11.0" -content-hash = "df5d8f387315a036546dc403ac7cac7cbd4d845336231390548c3ed488c039bc" +content-hash = "50159324a6c0b3a5e03ac842690fda7c227cf3a4c5f1704a07861ab8a9fd0861" diff --git a/pyproject.toml b/pyproject.toml index 88aa45c..6383955 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -57,6 +57,7 @@ toml = "*" [tool.poetry.group.dev.dependencies] mypy-boto3-events = "^1.28.46" +pytest-socket = "^0.6.0" [tool.isort] py_version = 311 diff --git a/tests/unit/stream_processor/conftest.py b/tests/unit/stream_processor/conftest.py index 62b5c55..7e91dec 100644 --- a/tests/unit/stream_processor/conftest.py +++ b/tests/unit/stream_processor/conftest.py @@ -5,6 +5,13 @@ from product.models.products.product import ProductNotification from product.stream_processor.dal.events.event_handler import ProductNotificationHandler from tests.unit.stream_processor.data_builder import generate_dynamodb_stream_events, generate_product_notifications +from pytest_socket import disable_socket + + +def pytest_runtest_setup(): + """Disable Unix and TCP sockets for Data masking tests""" + disable_socket() + T = TypeVar('T') Fixture = Generator[T, None, None] From e9f3aa425fa6bea235a9d222a73d7d2dc9714f23 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Tue, 3 Oct 2023 13:30:02 +0200 Subject: [PATCH 30/58] chore: add eventbridge provider test skeleton Signed-off-by: heitorlessa --- .../test_eventbridge_provider.py | 73 +++++++++++++++++++ 1 file changed, 73 insertions(+) create mode 100644 tests/unit/stream_processor/test_eventbridge_provider.py diff --git a/tests/unit/stream_processor/test_eventbridge_provider.py b/tests/unit/stream_processor/test_eventbridge_provider.py new file mode 100644 index 0000000..0803932 --- /dev/null +++ b/tests/unit/stream_processor/test_eventbridge_provider.py @@ -0,0 +1,73 @@ +from datetime import datetime +from typing import ClassVar + +import pytest +from pydantic import BaseModel + +from product.constants import XRAY_TRACE_ID_ENV +from product.stream_processor.dal.events.models.input import Event, EventMetadata +from product.stream_processor.dal.events.providers.eventbridge import EventBridge + + +def test_eventbridge_build_put_events_from_event_payload(): + # GIVEN + class SampleNotification(BaseModel): + message: str + + event_source: ClassVar[str] = 'test' + event_name: ClassVar[str] = 'sample' + event_version: ClassVar[str] = 'v1' + + event_bus_name = 'sample_bus' + notification = SampleNotification(message='test') + + event = Event( + data=notification, + metadata=EventMetadata( + event_name=SampleNotification.event_name, event_source=SampleNotification.event_source, + event_version=SampleNotification.event_version, correlation_id='test' + ) + ) + + # WHEN + event_provider = EventBridge(bus_name=event_bus_name) + request = event_provider.build_put_events_request(payload=[event]) + + # THEN + entry = request[0] + assert entry['Source'] == event.metadata.event_source + assert entry['Detail'] == event.model_dump_json() + assert entry['DetailType'] == f'{event.metadata.event_name}.{event.metadata.event_version}' + assert entry['EventBusName'] == event_bus_name + + +def test_eventbridge_build_put_events_from_event_payload_include_trace_header(monkeypatch: pytest.MonkeyPatch): + # GIVEN + trace_id = '90835161-3067-47ba-8126-fda76dfdb0b0' + monkeypatch.setenv(XRAY_TRACE_ID_ENV, trace_id) + + class SampleNotification(BaseModel): + message: str + + event_source: 
ClassVar[str] = 'test' + event_name: ClassVar[str] = 'sample' + event_version: ClassVar[str] = 'v1' + + event_bus_name = 'sample_bus' + notification = SampleNotification(message='test') + + event = Event( + data=notification, + metadata=EventMetadata( + event_name=SampleNotification.event_name, event_source=SampleNotification.event_source, + event_version=SampleNotification.event_version, correlation_id='test' + ) + ) + + # WHEN + event_provider = EventBridge(bus_name=event_bus_name) + request = event_provider.build_put_events_request(payload=[event]) + + # THEN + entry = request[0] + assert entry['TraceHeader'] == trace_id From ab590c2936030ba2d56abcb7474e6c3866f50bd1 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Tue, 3 Oct 2023 15:42:09 +0200 Subject: [PATCH 31/58] chore: change to ProductChangeNotification Signed-off-by: heitorlessa --- product/models/products/product.py | 9 ++------- .../domain_logic/product_notification.py | 8 ++++---- .../stream_processor/handlers/process_stream.py | 14 +++++++------- tests/unit/stream_processor/conftest.py | 16 ++++++++-------- tests/unit/stream_processor/data_builder.py | 8 ++++---- .../test_product_notification.py | 4 ++-- 6 files changed, 27 insertions(+), 32 deletions(-) diff --git a/product/models/products/product.py b/product/models/products/product.py index ae47e72..f530223 100644 --- a/product/models/products/product.py +++ b/product/models/products/product.py @@ -15,14 +15,9 @@ class Product(BaseModel): price: PositiveInt -class ProductNotification(BaseModel): +class ProductChangeNotification(BaseModel): product_id: ProductId status: Literal['ADDED', 'REMOVED', 'UPDATED'] created_at: datetime = Field(default_factory=datetime.utcnow) - # NOTE: consider whether this is the best place. - # at best, keeping it close to the model it's easier to detect schema or breaking changes - # these are not serialized when using dict(), model_dump(), or model_dump_json() - event_name: ClassVar[str] = 'PRODUCT_CHANGE_NOTIFICATION' - event_version: ClassVar[str] = 'v1' - event_source: ClassVar[str] = 'myorg.product.product_notification' + __version__: str = 'V1' diff --git a/product/stream_processor/domain_logic/product_notification.py b/product/stream_processor/domain_logic/product_notification.py index c767c70..3f0a03a 100644 --- a/product/stream_processor/domain_logic/product_notification.py +++ b/product/stream_processor/domain_logic/product_notification.py @@ -1,14 +1,14 @@ import os -from product.models.products.product import ProductNotification -from product.stream_processor.dal.events.event_handler import ProductNotificationHandler +from product.models.products.product import ProductChangeNotification +from product.stream_processor.dal.events.event_handler import ProductChangeNotificationHandler from product.stream_processor.dal.events.providers.eventbridge import EventBridge EVENT_BUS = os.environ.get('EVENT_BUS', '') -def notify_product_updates(update: list[ProductNotification], event_handler: ProductNotificationHandler | None = None): +def notify_product_updates(update: list[ProductChangeNotification], event_handler: ProductChangeNotificationHandler | None = None): if event_handler is None: - event_handler = ProductNotificationHandler(provider=EventBridge(EVENT_BUS)) + event_handler = ProductChangeNotificationHandler(provider=EventBridge(EVENT_BUS)) return event_handler.emit(payload=update) diff --git a/product/stream_processor/handlers/process_stream.py b/product/stream_processor/handlers/process_stream.py index a8c4613..599979e 100644 --- 
a/product/stream_processor/handlers/process_stream.py +++ b/product/stream_processor/handlers/process_stream.py @@ -4,8 +4,8 @@ from aws_lambda_powertools.utilities.data_classes.dynamo_db_stream_event import DynamoDBStreamEvent from aws_lambda_powertools.utilities.typing import LambdaContext -from product.models.products.product import ProductNotification -from product.stream_processor.dal.events.event_handler import ProductNotificationHandler +from product.models.products.product import ProductChangeNotification +from product.stream_processor.dal.events.event_handler import ProductChangeNotificationHandler from product.stream_processor.domain_logic.product_notification import notify_product_updates logger = Logger() @@ -15,8 +15,8 @@ def process_stream( event: dict[str, Any], context: LambdaContext, - event_handler: ProductNotificationHandler | None = None, -) -> list[ProductNotification]: + event_handler: ProductChangeNotificationHandler | None = None, +) -> list[ProductChangeNotification]: # Until we create our handler product stream change input stream_records = DynamoDBStreamEvent(event) @@ -26,10 +26,10 @@ def process_stream( match record.event_name: case record.event_name.INSERT: # type: ignore[union-attr] - product_updates.append(ProductNotification(product_id=product_id, status='ADDED')) + product_updates.append(ProductChangeNotification(product_id=product_id, status='ADDED')) case record.event_name.MODIFY: # type: ignore[union-attr] - product_updates.append(ProductNotification(product_id=product_id, status='UPDATED')) + product_updates.append(ProductChangeNotification(product_id=product_id, status='UPDATED')) case record.event_name.REMOVE: # type: ignore[union-attr] - product_updates.append(ProductNotification(product_id=product_id, status='REMOVED')) + product_updates.append(ProductChangeNotification(product_id=product_id, status='REMOVED')) return notify_product_updates(update=product_updates, event_handler=event_handler) diff --git a/tests/unit/stream_processor/conftest.py b/tests/unit/stream_processor/conftest.py index 7e91dec..b3384bb 100644 --- a/tests/unit/stream_processor/conftest.py +++ b/tests/unit/stream_processor/conftest.py @@ -2,8 +2,8 @@ import pytest -from product.models.products.product import ProductNotification -from product.stream_processor.dal.events.event_handler import ProductNotificationHandler +from product.models.products.product import ProductChangeNotification +from product.stream_processor.dal.events.event_handler import ProductChangeNotificationHandler from tests.unit.stream_processor.data_builder import generate_dynamodb_stream_events, generate_product_notifications from pytest_socket import disable_socket @@ -18,28 +18,28 @@ def pytest_runtest_setup(): class FakePublishedEvent(TypedDict): - event: ProductNotification + event: ProductChangeNotification metadata: dict[str, Any] -class FakeEventHandler(ProductNotificationHandler): +class FakeEventHandler(ProductChangeNotificationHandler): def __init__(self): self.published_events: list[FakePublishedEvent] = [] - def emit(self, payload: list[ProductNotification], metadata: dict[str, Any] | None = None): + def emit(self, payload: list[ProductChangeNotification], metadata: dict[str, Any] | None = None): metadata = metadata or {} for product in payload: self.published_events.append({'event': product, 'metadata': metadata}) @property - def published_notifications(self) -> list[ProductNotification]: + def published_notifications(self) -> list[ProductChangeNotification]: return [notification['event'] for 
notification in self.published_events] def __len__(self): return len(self.published_events) - def __contains__(self, item: ProductNotification): + def __contains__(self, item: ProductChangeNotification): return item in self.published_notifications @@ -54,5 +54,5 @@ def event_store() -> Fixture[FakeEventHandler]: @pytest.fixture -def product_notifications() -> Fixture[list[ProductNotification]]: +def product_notifications() -> Fixture[list[ProductChangeNotification]]: yield generate_product_notifications() diff --git a/tests/unit/stream_processor/data_builder.py b/tests/unit/stream_processor/data_builder.py index e73a355..effc1da 100644 --- a/tests/unit/stream_processor/data_builder.py +++ b/tests/unit/stream_processor/data_builder.py @@ -4,7 +4,7 @@ from typing import Any from uuid import uuid4 -from product.models.products.product import ProductNotification +from product.models.products.product import ProductChangeNotification def generate_dynamodb_stream_events(product_id: str = '8c18c85a-0f10-4b73-b54a-07ab0d381018',) -> dict[str, Any]: @@ -86,9 +86,9 @@ def generate_dynamodb_stream_events(product_id: str = '8c18c85a-0f10-4b73-b54a-0 } -def generate_product_notifications(product_id: str = '') -> list[ProductNotification]: +def generate_product_notifications(product_id: str = '') -> list[ProductChangeNotification]: product_id = product_id or f'{uuid4()}' return [ - ProductNotification(product_id=product_id, status=random.choice(['ADDED', 'REMOVED', 'UPDATED'])), - ProductNotification(product_id=product_id, status=random.choice(['ADDED', 'REMOVED', 'UPDATED'])), + ProductChangeNotification(product_id=product_id, status=random.choice(['ADDED', 'REMOVED', 'UPDATED'])), + ProductChangeNotification(product_id=product_id, status=random.choice(['ADDED', 'REMOVED', 'UPDATED'])), ] diff --git a/tests/unit/stream_processor/test_product_notification.py b/tests/unit/stream_processor/test_product_notification.py index fae3af0..aad4b08 100644 --- a/tests/unit/stream_processor/test_product_notification.py +++ b/tests/unit/stream_processor/test_product_notification.py @@ -1,9 +1,9 @@ -from product.models.products.product import ProductNotification +from product.models.products.product import ProductChangeNotification from product.stream_processor.domain_logic.product_notification import notify_product_updates from tests.unit.stream_processor.conftest import FakeEventHandler -def test_product_notifications_are_emitted(product_notifications: list[ProductNotification], event_store: FakeEventHandler): +def test_product_notifications_are_emitted(product_notifications: list[ProductChangeNotification], event_store: FakeEventHandler): # GIVEN a list of Product Notifications and a fake event handler # WHEN the product notifications are processed notify_product_updates(update=product_notifications, event_handler=event_store) From b90175f33bf5d0fff64c6eadaccad4305f0c5e79 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Tue, 3 Oct 2023 15:45:46 +0200 Subject: [PATCH 32/58] chore: infer event structure from any model Signed-off-by: heitorlessa --- product/stream_processor/dal/events/base.py | 55 ++++++++++++++++-- .../dal/events/event_handler.py | 24 ++------ .../dal/events/providers/eventbridge.py | 2 +- .../test_eventbridge_provider.py | 58 +++++++------------ 4 files changed, 79 insertions(+), 60 deletions(-) diff --git a/product/stream_processor/dal/events/base.py b/product/stream_processor/dal/events/base.py index 2ed42d8..4206270 100644 --- a/product/stream_processor/dal/events/base.py +++ 
b/product/stream_processor/dal/events/base.py
@@ -1,12 +1,28 @@
+import re
 from abc import ABC, abstractmethod
 from typing import Any, Generic, Sequence, TypeVar
+from uuid import uuid4
 
-from product.stream_processor.dal.events.models.input import Event
+from product.stream_processor.dal.events.models.input import Event, AnyModel, EventMetadata
 from product.stream_processor.dal.events.models.output import EventReceipt
 
 T = TypeVar('T')
 
+# negative look ahead (?!char). Don't try to match the start of the string and any underscore that follows e.g., `_` and `__`
+_exclude_underscores = r'(?!^)(?<!_)'
+_pascal_case = r'(?=[A-Z][a-z])'
+_or = '|'
+_followed_by_lower_case_or_digit = r'(?<=[a-z0-9])(?=[A-Z])'
+
+# ProductNotification -> Product_Notification
+# ProductNotificationV2 -> Product_Notification_V2
+# ProductHTTP -> Product_HTTP
+_pascal_to_snake_pattern = re.compile(rf'({_exclude_underscores}{_pascal_case}{_or}{_followed_by_lower_case_or_digit})')
+
+
 class EventProvider(ABC):
 
     @abstractmethod
@@ -15,10 +31,41 @@ def send(self, payload: Sequence[Event]) -> EventReceipt:
 
 
 class EventHandler(ABC, Generic[T]):
-
-    def __init__(self, provider: EventProvider) -> None:
-        ...
+    def __init__(self, provider: EventProvider, event_source: str) -> None:
+        self.provider = provider
+        self.event_source = event_source
 
     @abstractmethod
     def emit(self, payload: Sequence[T], metadata: dict[str, Any] | None = None) -> EventReceipt:
         ...
+
+
+def convert_model_to_event_name(model_name: str) -> str:
+    """ Convert ModelName (pascal) to MODEL_NAME (snake, uppercase)"""
+    return _pascal_to_snake_pattern.sub(r'_\1', model_name).upper()
+
+
+def build_events_from_models(models: Sequence[AnyModel], event_source: str, metadata: dict[str, Any] | None = None, correlation_id: str = '') -> list[Event]:
+    metadata = metadata or {}
+    correlation_id = correlation_id or f'{uuid4()}'
+
+    events = []
+
+    for model in models:
+        event_name = convert_model_to_event_name(model_name=model.__class__.__name__)
+        event_version = getattr(model, '__version__', 'V1').upper()  # defaults to V1
+
+        events.append(
+            Event(
+                data=model,
+                metadata=EventMetadata(
+                    event_name=event_name,
+                    event_source=event_source,
+                    event_version=event_version,
+                    correlation_id=correlation_id,
+                    **metadata
+                )
+            )
+        )
+
+    return events
diff --git a/product/stream_processor/dal/events/event_handler.py b/product/stream_processor/dal/events/event_handler.py
index 3cf8708..535bd09 100644
--- a/product/stream_processor/dal/events/event_handler.py
+++ b/product/stream_processor/dal/events/event_handler.py
@@ -1,29 +1,17 @@
 from typing import Any
 from uuid import uuid4
 
-from product.models.products.product import ProductNotification
+from product.models.products.product import ProductChangeNotification
 from product.stream_processor.dal.events.base import EventHandler, EventProvider
 from product.stream_processor.dal.events.models.input import Event, EventMetadata
 from product.stream_processor.dal.events.models.output import EventReceipt
 
 
-class ProductNotificationHandler(EventHandler):
+class ProductChangeNotificationHandler(EventHandler):
 
-    def __init__(self, provider: EventProvider) -> None:
-        self.provider = provider
-
-    def emit(self, payload: list[ProductNotification], metadata: dict[str, Any] | None = None) -> EventReceipt:
-        metadata = metadata or {}
-        correlation_id = f'{uuid4()}'  # we want the same correlation ID for the batch; use logger correlation ID later
-
-        # NOTE: this will be generic for all events later, we can easily make it reusable
-        # also consider a method to build event from payload
-        event_payload = [
-            Event(
-                data=notification.to_dict(),
-                
metadata=EventMetadata(event_type=notification.event_name, event_source=notification.event_source, - event_version=notification.event_version, correlation_id=correlation_id, **metadata)) - for notification in payload - ] + def __init__(self, provider: EventProvider, event_source: str) -> None: + super().__init__(provider=provider, event_source=event_source) + def emit(self, payload: list[ProductChangeNotification], metadata: dict[str, Any] | None = None, correlation_id: str = '') -> EventReceipt: + event_payload = self.build_event_from_models(models=payload, metadata=metadata, correlation_id=correlation_id) return self.provider.send(payload=event_payload) diff --git a/product/stream_processor/dal/events/providers/eventbridge.py b/product/stream_processor/dal/events/providers/eventbridge.py index 64439f3..4124ad2 100644 --- a/product/stream_processor/dal/events/providers/eventbridge.py +++ b/product/stream_processor/dal/events/providers/eventbridge.py @@ -36,7 +36,7 @@ def build_put_events_request(self, payload: list[Event]) -> list['PutEventsReque for event in payload: events.append({ 'Source': event.metadata.event_source, - 'DetailType': f'{event.metadata.event_name}.{event.metadata.event_version}', + 'DetailType': event.metadata.event_name, 'Detail': event.model_dump_json(), 'EventBusName': self.bus_name, 'TraceHeader': os.environ.get(XRAY_TRACE_ID_ENV, ''), diff --git a/tests/unit/stream_processor/test_eventbridge_provider.py b/tests/unit/stream_processor/test_eventbridge_provider.py index 0803932..1fbe3c5 100644 --- a/tests/unit/stream_processor/test_eventbridge_provider.py +++ b/tests/unit/stream_processor/test_eventbridge_provider.py @@ -5,69 +5,53 @@ from pydantic import BaseModel from product.constants import XRAY_TRACE_ID_ENV +from product.stream_processor.dal.events.base import build_events_from_models, convert_model_to_event_name from product.stream_processor.dal.events.models.input import Event, EventMetadata from product.stream_processor.dal.events.providers.eventbridge import EventBridge def test_eventbridge_build_put_events_from_event_payload(): - # GIVEN + # GIVEN a list of events from a SampleNotification model class SampleNotification(BaseModel): message: str - event_source: ClassVar[str] = 'test' - event_name: ClassVar[str] = 'sample' - event_version: ClassVar[str] = 'v1' + __version__ = 'V1' - event_bus_name = 'sample_bus' notification = SampleNotification(message='test') + events = build_events_from_models(models=[notification], event_source='test') - event = Event( - data=notification, - metadata=EventMetadata( - event_name=SampleNotification.event_name, event_source=SampleNotification.event_source, - event_version=SampleNotification.event_version, correlation_id='test' - ) - ) + # WHEN EventBridge provider builds a PutEvents request + event_provider = EventBridge(bus_name='test_bus') + request = event_provider.build_put_events_request(payload=events) - # WHEN - event_provider = EventBridge(bus_name=event_bus_name) - request = event_provider.build_put_events_request(payload=[event]) + # THEN EventBridge PutEvents request should match our metadata and model data + published_event = request[0] + event = events[0] - # THEN - entry = request[0] - assert entry['Source'] == event.metadata.event_source - assert entry['Detail'] == event.model_dump_json() - assert entry['DetailType'] == f'{event.metadata.event_name}.{event.metadata.event_version}' - assert entry['EventBusName'] == event_bus_name + assert published_event['Source'] == event.metadata.event_source + assert 
published_event['Detail'] == event.model_dump_json() + assert published_event['DetailType'] == event.metadata.event_name + assert published_event['EventBusName'] == event_provider.bus_name def test_eventbridge_build_put_events_from_event_payload_include_trace_header(monkeypatch: pytest.MonkeyPatch): - # GIVEN + # GIVEN X-Ray Trace ID is available in the environment trace_id = '90835161-3067-47ba-8126-fda76dfdb0b0' monkeypatch.setenv(XRAY_TRACE_ID_ENV, trace_id) class SampleNotification(BaseModel): message: str - event_source: ClassVar[str] = 'test' - event_name: ClassVar[str] = 'sample' - event_version: ClassVar[str] = 'v1' + __version__ = 'v1' event_bus_name = 'sample_bus' notification = SampleNotification(message='test') - - event = Event( - data=notification, - metadata=EventMetadata( - event_name=SampleNotification.event_name, event_source=SampleNotification.event_source, - event_version=SampleNotification.event_version, correlation_id='test' - ) - ) - - # WHEN + events = build_events_from_models(models=[notification], event_source='test') event_provider = EventBridge(bus_name=event_bus_name) - request = event_provider.build_put_events_request(payload=[event]) - # THEN + # WHEN EventBridge provider builds a PutEvents request + request = event_provider.build_put_events_request(payload=events) + + # THEN PutEvents request should include 'TraceHeader' with the available X-Ray Trace ID entry = request[0] assert entry['TraceHeader'] == trace_id From e3115b2586a49e1e0528d41ee5b77b6fadc7e0c7 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Tue, 3 Oct 2023 17:09:59 +0200 Subject: [PATCH 33/58] chore: cleanup Signed-off-by: heitorlessa --- .../dal/events/event_handler.py | 10 ++-- .../dal/events/models/output.py | 4 +- tests/unit/stream_processor/conftest.py | 57 ++++++++----------- .../test_eventbridge_provider.py | 6 +- .../test_process_stream_handler.py | 9 ++- .../test_product_notification.py | 11 ++-- 6 files changed, 47 insertions(+), 50 deletions(-) diff --git a/product/stream_processor/dal/events/event_handler.py b/product/stream_processor/dal/events/event_handler.py index 535bd09..dacdc9b 100644 --- a/product/stream_processor/dal/events/event_handler.py +++ b/product/stream_processor/dal/events/event_handler.py @@ -1,9 +1,7 @@ from typing import Any -from uuid import uuid4 from product.models.products.product import ProductChangeNotification -from product.stream_processor.dal.events.base import EventHandler, EventProvider -from product.stream_processor.dal.events.models.input import Event, EventMetadata +from product.stream_processor.dal.events.base import EventHandler, EventProvider, build_events_from_models from product.stream_processor.dal.events.models.output import EventReceipt @@ -12,6 +10,8 @@ class ProductChangeNotificationHandler(EventHandler): def __init__(self, provider: EventProvider, event_source: str) -> None: super().__init__(provider=provider, event_source=event_source) - def emit(self, payload: list[ProductChangeNotification], metadata: dict[str, Any] | None = None, correlation_id: str = '') -> EventReceipt: - event_payload = self.build_event_from_models(models=payload, metadata=metadata, correlation_id=correlation_id) + def emit(self, payload: list[ProductChangeNotification], metadata: dict[str, Any] | None = None, + correlation_id: str = '') -> EventReceipt: + event_payload = build_events_from_models(models=payload, metadata=metadata, correlation_id=correlation_id, + event_source=self.event_source) return self.provider.send(payload=event_payload) diff --git 
a/product/stream_processor/dal/events/models/output.py b/product/stream_processor/dal/events/models/output.py index eed3a59..2c67383 100644 --- a/product/stream_processor/dal/events/models/output.py +++ b/product/stream_processor/dal/events/models/output.py @@ -1,4 +1,4 @@ -from pydantic import BaseModel +from pydantic import BaseModel, Field class EventReceiptSuccessfulNotification(BaseModel): @@ -13,4 +13,4 @@ class EventReceiptUnsuccessfulNotification(BaseModel): class EventReceipt(BaseModel): successful_notifications: list[EventReceiptSuccessfulNotification] - unsuccessful_notifications: list[EventReceiptUnsuccessfulNotification] + unsuccessful_notifications: list[EventReceiptUnsuccessfulNotification] = Field(default_factory=list) diff --git a/tests/unit/stream_processor/conftest.py b/tests/unit/stream_processor/conftest.py index b3384bb..8bf0bfc 100644 --- a/tests/unit/stream_processor/conftest.py +++ b/tests/unit/stream_processor/conftest.py @@ -1,10 +1,10 @@ -from typing import Any, Generator, TypedDict, TypeVar - -import pytest +from typing import Any, Generator, TypeVar, Sequence from product.models.products.product import ProductChangeNotification +from product.stream_processor.dal.events.base import EventProvider, build_events_from_models from product.stream_processor.dal.events.event_handler import ProductChangeNotificationHandler -from tests.unit.stream_processor.data_builder import generate_dynamodb_stream_events, generate_product_notifications +from product.stream_processor.dal.events.models.input import Event +from product.stream_processor.dal.events.models.output import EventReceipt, EventReceiptSuccessfulNotification from pytest_socket import disable_socket @@ -17,42 +17,35 @@ def pytest_runtest_setup(): Fixture = Generator[T, None, None] -class FakePublishedEvent(TypedDict): - event: ProductChangeNotification - metadata: dict[str, Any] +# Fakes are in-memory implementations of our interface, serving the following purposes: +# -- Remove the need for mocks that need to be aware of scope and return types +# -- Make it easier to assert data structures that would be hard otherwise to introspect +# -- Simple reference for an EventHandler and EventProvider + +class FakeProvider(EventProvider): + def send(self, payload: Sequence[Event]) -> EventReceipt: + notifications = [EventReceiptSuccessfulNotification(receipt_id='test') for _ in payload] + return EventReceipt(successful_notifications=notifications) class FakeEventHandler(ProductChangeNotificationHandler): - def __init__(self): - self.published_events: list[FakePublishedEvent] = [] + def __init__(self, provider: EventProvider = FakeProvider(), event_source: str = 'fake') -> None: + super().__init__(provider=provider, event_source=event_source) + self.published_payloads: list[ProductChangeNotificationHandler] = [] - def emit(self, payload: list[ProductChangeNotification], metadata: dict[str, Any] | None = None): + def emit(self, payload: list[ProductChangeNotification], metadata: dict[str, Any] | None = None, + correlation_id: str = '') -> EventReceipt: metadata = metadata or {} - for product in payload: - self.published_events.append({'event': product, 'metadata': metadata}) + event_payload = build_events_from_models(models=payload, metadata=metadata, correlation_id=correlation_id, + event_source='fake') + receipt = self.provider.send(payload=event_payload) - @property - def published_notifications(self) -> list[ProductChangeNotification]: - return [notification['event'] for notification in self.published_events] + 
self.published_payloads.extend(payload) + return receipt def __len__(self): - return len(self.published_events) + return len(self.published_payloads) def __contains__(self, item: ProductChangeNotification): - return item in self.published_notifications - - -@pytest.fixture -def dynamodb_stream_events() -> Fixture[dict[str, Any]]: - yield generate_dynamodb_stream_events() - - -@pytest.fixture -def event_store() -> Fixture[FakeEventHandler]: - yield FakeEventHandler() - - -@pytest.fixture -def product_notifications() -> Fixture[list[ProductChangeNotification]]: - yield generate_product_notifications() + return item in self.published_payloads diff --git a/tests/unit/stream_processor/test_eventbridge_provider.py b/tests/unit/stream_processor/test_eventbridge_provider.py index 1fbe3c5..2769299 100644 --- a/tests/unit/stream_processor/test_eventbridge_provider.py +++ b/tests/unit/stream_processor/test_eventbridge_provider.py @@ -1,12 +1,8 @@ -from datetime import datetime -from typing import ClassVar - import pytest from pydantic import BaseModel from product.constants import XRAY_TRACE_ID_ENV -from product.stream_processor.dal.events.base import build_events_from_models, convert_model_to_event_name -from product.stream_processor.dal.events.models.input import Event, EventMetadata +from product.stream_processor.dal.events.base import build_events_from_models from product.stream_processor.dal.events.providers.eventbridge import EventBridge diff --git a/tests/unit/stream_processor/test_process_stream_handler.py b/tests/unit/stream_processor/test_process_stream_handler.py index 8805ab2..66360df 100644 --- a/tests/unit/stream_processor/test_process_stream_handler.py +++ b/tests/unit/stream_processor/test_process_stream_handler.py @@ -1,10 +1,14 @@ from product.stream_processor.handlers.process_stream import process_stream from tests.unit.stream_processor.conftest import FakeEventHandler +from tests.unit.stream_processor.data_builder import generate_dynamodb_stream_events from tests.utils import generate_context -def test_process_stream_notify_product_updates(dynamodb_stream_events: dict, event_store: FakeEventHandler): +def test_process_stream_notify_product_updates(): # GIVEN a DynamoDB stream event and a fake event handler + dynamodb_stream_events = generate_dynamodb_stream_events() + event_store = FakeEventHandler() + # WHEN process_stream is called with a custom event handler process_stream(event=dynamodb_stream_events, context=generate_context(), event_handler=event_store) @@ -14,8 +18,9 @@ def test_process_stream_notify_product_updates(dynamodb_stream_events: dict, eve # NOTE: this should fail once we have schema validation -def test_process_stream_with_empty_records(event_store: FakeEventHandler): +def test_process_stream_with_empty_records(): # GIVEN an empty DynamoDB stream event + event_store = FakeEventHandler() event: dict[str, list] = {'Records': []} # WHEN process_stream is called with a custom event handler diff --git a/tests/unit/stream_processor/test_product_notification.py b/tests/unit/stream_processor/test_product_notification.py index aad4b08..0d1beba 100644 --- a/tests/unit/stream_processor/test_product_notification.py +++ b/tests/unit/stream_processor/test_product_notification.py @@ -1,13 +1,16 @@ -from product.models.products.product import ProductChangeNotification from product.stream_processor.domain_logic.product_notification import notify_product_updates from tests.unit.stream_processor.conftest import FakeEventHandler +from tests.unit.stream_processor.data_builder import 
generate_product_notifications -def test_product_notifications_are_emitted(product_notifications: list[ProductChangeNotification], event_store: FakeEventHandler): +def test_product_notifications_are_emitted(): # GIVEN a list of Product Notifications and a fake event handler + product_notifications = generate_product_notifications() + event_store = FakeEventHandler() + # WHEN the product notifications are processed - notify_product_updates(update=product_notifications, event_handler=event_store) + receipt = notify_product_updates(update=product_notifications, event_handler=event_store) # THEN the fake event handler should emit these product notifications - assert len(event_store) == len(product_notifications) + assert len(receipt.successful_notifications) == len(product_notifications) assert all(notification in event_store for notification in product_notifications) From 865fb4b7bef37d52cd9d170cdbcfcedccc11bdfd Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Tue, 3 Oct 2023 18:13:07 +0200 Subject: [PATCH 34/58] chore: test event structure and model to event conversions Signed-off-by: heitorlessa --- product/stream_processor/dal/events/base.py | 7 +- .../stream_processor/dal/events/constants.py | 1 + tests/unit/stream_processor/test_events.py | 81 +++++++++++++++++++ 3 files changed, 86 insertions(+), 3 deletions(-) create mode 100644 product/stream_processor/dal/events/constants.py create mode 100644 tests/unit/stream_processor/test_events.py diff --git a/product/stream_processor/dal/events/base.py b/product/stream_processor/dal/events/base.py index 4206270..449c0be 100644 --- a/product/stream_processor/dal/events/base.py +++ b/product/stream_processor/dal/events/base.py @@ -3,12 +3,12 @@ from typing import Any, Generic, Sequence, TypeVar from uuid import uuid4 +from product.stream_processor.dal.events.constants import DEFAULT_EVENT_VERSION from product.stream_processor.dal.events.models.input import Event, AnyModel, EventMetadata from product.stream_processor.dal.events.models.output import EventReceipt T = TypeVar('T') - # negative look ahead (?|char). Don't try to match the start of the string and any underscore that follows e.g., `_` and `__` _exclude_underscores = r'(?!^)(? 
str: return _pascal_to_snake_pattern.sub(r'_\1', model_name).upper() -def build_events_from_models(models: Sequence[AnyModel], event_source: str, metadata: dict[str, Any] | None = None, correlation_id: str = '') -> list[Event]: +def build_events_from_models(models: Sequence[AnyModel], event_source: str, metadata: dict[str, Any] | None = None, + correlation_id: str = '') -> list[Event]: metadata = metadata or {} correlation_id = correlation_id or f'{uuid4()}' @@ -53,7 +54,7 @@ def build_events_from_models(models: Sequence[AnyModel], event_source: str, meta for model in models: event_name = convert_model_to_event_name(model_name=model.__class__.__name__) - event_version = getattr(model, '__version__', 'V1').upper() # defaults to V1 + event_version = getattr(model, '__version__', DEFAULT_EVENT_VERSION) # defaults to v1 events.append( Event( diff --git a/product/stream_processor/dal/events/constants.py b/product/stream_processor/dal/events/constants.py new file mode 100644 index 0000000..edbfc7b --- /dev/null +++ b/product/stream_processor/dal/events/constants.py @@ -0,0 +1 @@ +DEFAULT_EVENT_VERSION = 'v1' diff --git a/tests/unit/stream_processor/test_events.py b/tests/unit/stream_processor/test_events.py new file mode 100644 index 0000000..800a731 --- /dev/null +++ b/tests/unit/stream_processor/test_events.py @@ -0,0 +1,81 @@ +from uuid import uuid4 + +from pydantic import BaseModel + +from product.stream_processor.dal.events.base import build_events_from_models, convert_model_to_event_name +from product.stream_processor.dal.events.constants import DEFAULT_EVENT_VERSION + + +def test_model_to_standard_event(): + # GIVEN a model with __version__ set + class SampleNotification(BaseModel): + message: str + + __version__ = 'v1' + + notification = SampleNotification(message='testing') + event_source = 'test' + + # WHEN we convert to an event + event = build_events_from_models(models=[notification], event_source=event_source)[0] + + # THEN the event should contain our notification in `.data`, all metadata under `.metadata` + # infer the event version from the model, convert PascalCase to SNAKE_CASE_UPPER + assert event.data == notification + assert event.metadata.event_source == event_source + assert event.metadata.event_version == notification.__version__ + assert event.metadata.event_name == convert_model_to_event_name( + notification.__class__.__name__) # SampleNotification -> SAMPLE_NOTIFICATION + assert event.metadata.correlation_id != '' + assert event.metadata.created_at != '' + + +def test_model_to_standard_event_with_correlation_id(): + # GIVEN a model with __version__ set + class SampleNotification(BaseModel): + message: str + + __version__ = 'v1' + + notification = SampleNotification(message='testing') + event_source = 'test' + correlation_id = f'{uuid4()}' + + # WHEN we convert to an event + event = build_events_from_models(models=[notification], event_source=event_source, correlation_id=correlation_id)[0] + + # THEN we should have the same correlation ID in the final event + assert event.metadata.correlation_id == correlation_id + + +def test_model_to_standard_event_with_additional_metadata(): + # GIVEN a model with __version__ set + class SampleNotification(BaseModel): + message: str + + __version__ = 'v1' + + notification = SampleNotification(message='testing') + event_source = 'test' + metadata = {'product_id': 'test', 'username': 'lessa'} + + # WHEN we convert to an event + event = build_events_from_models(models=[notification], event_source=event_source, metadata=metadata)[0] + + 
# THEN we should have additional metadata included in the final event + assert metadata.items() <= event.metadata.model_dump().items() + + +def test_model_without_version_to_standard_event(): + # GIVEN a model without __version__ set + class SampleNotification(BaseModel): + message: str + + notification = SampleNotification(message='testing') + event_source = 'test' + + # WHEN we convert to an event + event = build_events_from_models(models=[notification], event_source=event_source)[0] + + # THEN we should add a default v1 version + assert event.metadata.event_version == DEFAULT_EVENT_VERSION From 903d0693407855a15820070330524d917ad8ad35 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Tue, 3 Oct 2023 18:14:27 +0200 Subject: [PATCH 35/58] chore: adjust comment on event name Signed-off-by: heitorlessa --- tests/unit/stream_processor/test_events.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/tests/unit/stream_processor/test_events.py b/tests/unit/stream_processor/test_events.py index 800a731..5a3fd2b 100644 --- a/tests/unit/stream_processor/test_events.py +++ b/tests/unit/stream_processor/test_events.py @@ -20,12 +20,11 @@ class SampleNotification(BaseModel): event = build_events_from_models(models=[notification], event_source=event_source)[0] # THEN the event should contain our notification in `.data`, all metadata under `.metadata` - # infer the event version from the model, convert PascalCase to SNAKE_CASE_UPPER + # infer the event version from the model, event name infers model name from PascalCase to SNAKE_CASE_UPPER assert event.data == notification assert event.metadata.event_source == event_source assert event.metadata.event_version == notification.__version__ - assert event.metadata.event_name == convert_model_to_event_name( - notification.__class__.__name__) # SampleNotification -> SAMPLE_NOTIFICATION + assert event.metadata.event_name == convert_model_to_event_name(notification.__class__.__name__) assert event.metadata.correlation_id != '' assert event.metadata.created_at != '' From 5ba71d35f2e38e82c6dee7c2b1a17975829cf893 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Tue, 3 Oct 2023 20:54:30 +0200 Subject: [PATCH 36/58] chore: complete eventbridge contract tests Signed-off-by: heitorlessa --- product/stream_processor/dal/events/base.py | 15 +- .../dal/events/event_handler.py | 6 +- .../stream_processor/dal/events/exceptions.py | 9 +- .../dal/events/models/output.py | 8 +- .../dal/events/providers/eventbridge.py | 51 ++++--- tests/unit/stream_processor/conftest.py | 20 +-- .../test_eventbridge_provider.py | 133 ++++++++++++++++++ .../test_product_notification.py | 2 +- 8 files changed, 194 insertions(+), 50 deletions(-) diff --git a/product/stream_processor/dal/events/base.py b/product/stream_processor/dal/events/base.py index 449c0be..76b7d59 100644 --- a/product/stream_processor/dal/events/base.py +++ b/product/stream_processor/dal/events/base.py @@ -4,7 +4,7 @@ from uuid import uuid4 from product.stream_processor.dal.events.constants import DEFAULT_EVENT_VERSION -from product.stream_processor.dal.events.models.input import Event, AnyModel, EventMetadata +from product.stream_processor.dal.events.models.input import AnyModel, Event, EventMetadata from product.stream_processor.dal.events.models.output import EventReceipt T = TypeVar('T') @@ -31,6 +31,7 @@ def send(self, payload: Sequence[Event]) -> EventReceipt: class EventHandler(ABC, Generic[T]): + def __init__(self, provider: EventProvider, event_source: str) -> None: self.provider = provider 
self.event_source = event_source @@ -58,15 +59,7 @@ def build_events_from_models(models: Sequence[AnyModel], event_source: str, meta events.append( Event( - data=model, - metadata=EventMetadata( - event_name=event_name, - event_source=event_source, - event_version=event_version, - correlation_id=correlation_id, - **metadata - ) - ) - ) + data=model, metadata=EventMetadata(event_name=event_name, event_source=event_source, event_version=event_version, + correlation_id=correlation_id, **metadata))) return events diff --git a/product/stream_processor/dal/events/event_handler.py b/product/stream_processor/dal/events/event_handler.py index dacdc9b..ae288db 100644 --- a/product/stream_processor/dal/events/event_handler.py +++ b/product/stream_processor/dal/events/event_handler.py @@ -10,8 +10,6 @@ class ProductChangeNotificationHandler(EventHandler): def __init__(self, provider: EventProvider, event_source: str) -> None: super().__init__(provider=provider, event_source=event_source) - def emit(self, payload: list[ProductChangeNotification], metadata: dict[str, Any] | None = None, - correlation_id: str = '') -> EventReceipt: - event_payload = build_events_from_models(models=payload, metadata=metadata, correlation_id=correlation_id, - event_source=self.event_source) + def emit(self, payload: list[ProductChangeNotification], metadata: dict[str, Any] | None = None, correlation_id: str = '') -> EventReceipt: + event_payload = build_events_from_models(models=payload, metadata=metadata, correlation_id=correlation_id, event_source=self.event_source) return self.provider.send(payload=event_payload) diff --git a/product/stream_processor/dal/events/exceptions.py b/product/stream_processor/dal/events/exceptions.py index 89c7c15..c77a153 100644 --- a/product/stream_processor/dal/events/exceptions.py +++ b/product/stream_processor/dal/events/exceptions.py @@ -1,2 +1,9 @@ +from product.stream_processor.dal.events.models.output import EventReceiptFail + + class ProductNotificationDeliveryError(Exception): - pass + + def __init__(self, message: str, receipts: list[EventReceiptFail]): + super().__init__(message) + self.message = message + self.receipts = receipts diff --git a/product/stream_processor/dal/events/models/output.py b/product/stream_processor/dal/events/models/output.py index 2c67383..743160a 100644 --- a/product/stream_processor/dal/events/models/output.py +++ b/product/stream_processor/dal/events/models/output.py @@ -1,16 +1,16 @@ from pydantic import BaseModel, Field -class EventReceiptSuccessfulNotification(BaseModel): +class EventReceiptSuccess(BaseModel): receipt_id: str -class EventReceiptUnsuccessfulNotification(BaseModel): +class EventReceiptFail(BaseModel): receipt_id: str error: str details: dict class EventReceipt(BaseModel): - successful_notifications: list[EventReceiptSuccessfulNotification] - unsuccessful_notifications: list[EventReceiptUnsuccessfulNotification] = Field(default_factory=list) + success: list[EventReceiptSuccess] + failed: list[EventReceiptFail] = Field(default_factory=list) diff --git a/product/stream_processor/dal/events/providers/eventbridge.py b/product/stream_processor/dal/events/providers/eventbridge.py index 4124ad2..a408870 100644 --- a/product/stream_processor/dal/events/providers/eventbridge.py +++ b/product/stream_processor/dal/events/providers/eventbridge.py @@ -2,12 +2,13 @@ from typing import TYPE_CHECKING, Optional import boto3 +import botocore.exceptions from product.constants import XRAY_TRACE_ID_ENV from product.stream_processor.dal.events.base import 
EventProvider
 from product.stream_processor.dal.events.exceptions import ProductNotificationDeliveryError
 from product.stream_processor.dal.events.models.input import Event
-from product.stream_processor.dal.events.models.output import EventReceipt, EventReceiptSuccessfulNotification, EventReceiptUnsuccessfulNotification
+from product.stream_processor.dal.events.models.output import EventReceipt, EventReceiptFail, EventReceiptSuccess
 
 if TYPE_CHECKING:
     from mypy_boto3_events import EventBridgeClient
@@ -24,43 +25,55 @@ def send(self, payload: list[Event]) -> EventReceipt:
         events = self.build_put_events_request(payload)
 
         # NOTE: we need a generator that will slice up to 10 event entries
-        result = self.client.put_events(Entries=events)
+        try:
+            result = self.client.put_events(Entries=events)
+        except botocore.exceptions.ClientError as exc:
+            error_message = exc.response['Error']['Message']
 
-        successful_requests, unsuccessful_requests = self._collect_receipts(result)
-        return EventReceipt(successful_notifications=successful_requests, unsuccessful_notifications=unsuccessful_requests)
+            receipt = EventReceiptFail(receipt_id='', error=error_message, details=exc.response['ResponseMetadata'])
+            raise ProductNotificationDeliveryError(f'Failed to deliver all events: {error_message}', receipts=[receipt]) from exc
+
+        success, failed = self._collect_receipts(result)
+        return EventReceipt(success=success, failed=failed)
 
     def build_put_events_request(self, payload: list[Event]) -> list['PutEventsRequestEntryTypeDef']:
         events: list['PutEventsRequestEntryTypeDef'] = []
 
         # 'Time' field is not included to be able to measure end-to-end latency later (time - created_at)
         for event in payload:
-            events.append({
+            trace_id = os.environ.get(XRAY_TRACE_ID_ENV)
+            event_request = {
                 'Source': event.metadata.event_source,
                 'DetailType': event.metadata.event_name,
                 'Detail': event.model_dump_json(),
                 'EventBusName': self.bus_name,
-                'TraceHeader': os.environ.get(XRAY_TRACE_ID_ENV, ''),
-            })
+            }
+
+            if trace_id:
+                event_request['TraceHeader'] = trace_id
+
+            events.append(event_request)
 
         return events
 
     @staticmethod
-    def _collect_receipts(
-            result: 'PutEventsResponseTypeDef') -> tuple[list[EventReceiptSuccessfulNotification], list[EventReceiptUnsuccessfulNotification]]:
-        successful_requests: list[EventReceiptSuccessfulNotification] = []
-        unsuccessful_requests: list[EventReceiptUnsuccessfulNotification] = []
+    def _collect_receipts(result: 'PutEventsResponseTypeDef') -> tuple[list[EventReceiptSuccess], list[EventReceiptFail]]:
+        successes: list[EventReceiptSuccess] = []
+        fails: list[EventReceiptFail] = []
 
         for receipt in result['Entries']:
-            if receipt['ErrorMessage']:
-                unsuccessful_requests.append(
-                    EventReceiptUnsuccessfulNotification(receipt_id=receipt['EventId'], error=receipt['ErrorMessage'],
-                                                         details={'error_code': receipt['ErrorCode']}))
+            error_message = receipt.get('ErrorMessage')
+            event_id = receipt.get('EventId', '')
+
+            if error_message:
+                error_code = receipt.get('ErrorCode')
+                fails.append(EventReceiptFail(receipt_id=event_id, error=error_message, details={'error_code': error_code}))
             else:
-                successful_requests.append(EventReceiptSuccessfulNotification(receipt_id=receipt['EventId']))
+                successes.append(EventReceiptSuccess(receipt_id=event_id))
 
         # NOTE: Improve this error by correlating which entry failed to send.
         # We will fail regardless, but it'll be useful for logging and correlation later on. 
- if result['FailedEntryCount'] >= 0: - raise ProductNotificationDeliveryError(f'Failed to deliver {len(unsuccessful_requests)} events') + if result['FailedEntryCount'] > 0: + raise ProductNotificationDeliveryError(f'Failed to deliver {len(fails)} events', receipts=fails) - return successful_requests, unsuccessful_requests + return successes, fails diff --git a/tests/unit/stream_processor/conftest.py b/tests/unit/stream_processor/conftest.py index 8bf0bfc..ad89cf4 100644 --- a/tests/unit/stream_processor/conftest.py +++ b/tests/unit/stream_processor/conftest.py @@ -1,11 +1,12 @@ -from typing import Any, Generator, TypeVar, Sequence +from typing import Any, Generator, Sequence, TypeVar + +from pytest_socket import disable_socket from product.models.products.product import ProductChangeNotification from product.stream_processor.dal.events.base import EventProvider, build_events_from_models from product.stream_processor.dal.events.event_handler import ProductChangeNotificationHandler from product.stream_processor.dal.events.models.input import Event -from product.stream_processor.dal.events.models.output import EventReceipt, EventReceiptSuccessfulNotification -from pytest_socket import disable_socket +from product.stream_processor.dal.events.models.output import EventReceipt, EventReceiptSuccess def pytest_runtest_setup(): @@ -16,16 +17,17 @@ def pytest_runtest_setup(): T = TypeVar('T') Fixture = Generator[T, None, None] - # Fakes are in-memory implementations of our interface, serving the following purposes: # -- Remove the need for mocks that need to be aware of scope and return types # -- Make it easier to assert data structures that would be hard otherwise to introspect # -- Simple reference for an EventHandler and EventProvider + class FakeProvider(EventProvider): + def send(self, payload: Sequence[Event]) -> EventReceipt: - notifications = [EventReceiptSuccessfulNotification(receipt_id='test') for _ in payload] - return EventReceipt(successful_notifications=notifications) + notifications = [EventReceiptSuccess(receipt_id='test') for _ in payload] + return EventReceipt(success=notifications) class FakeEventHandler(ProductChangeNotificationHandler): @@ -34,11 +36,9 @@ def __init__(self, provider: EventProvider = FakeProvider(), event_source: str = super().__init__(provider=provider, event_source=event_source) self.published_payloads: list[ProductChangeNotificationHandler] = [] - def emit(self, payload: list[ProductChangeNotification], metadata: dict[str, Any] | None = None, - correlation_id: str = '') -> EventReceipt: + def emit(self, payload: list[ProductChangeNotification], metadata: dict[str, Any] | None = None, correlation_id: str = '') -> EventReceipt: metadata = metadata or {} - event_payload = build_events_from_models(models=payload, metadata=metadata, correlation_id=correlation_id, - event_source='fake') + event_payload = build_events_from_models(models=payload, metadata=metadata, correlation_id=correlation_id, event_source='fake') receipt = self.provider.send(payload=event_payload) self.published_payloads.extend(payload) diff --git a/tests/unit/stream_processor/test_eventbridge_provider.py b/tests/unit/stream_processor/test_eventbridge_provider.py index 2769299..a5be105 100644 --- a/tests/unit/stream_processor/test_eventbridge_provider.py +++ b/tests/unit/stream_processor/test_eventbridge_provider.py @@ -1,8 +1,14 @@ +from uuid import uuid4 + +import boto3 import pytest +from botocore import stub +from botocore.exceptions import ClientError from pydantic import BaseModel from 
product.constants import XRAY_TRACE_ID_ENV
 from product.stream_processor.dal.events.base import build_events_from_models
+from product.stream_processor.dal.events.exceptions import ProductNotificationDeliveryError
 from product.stream_processor.dal.events.providers.eventbridge import EventBridge
 
 
@@ -51,3 +57,130 @@ class SampleNotification(BaseModel):
     # THEN PutEvents request should include 'TraceHeader' with the available X-Ray Trace ID
     entry = request[0]
     assert entry['TraceHeader'] == trace_id
+
+
+def test_eventbridge_put_events_with_stubber():
+    # GIVEN a list of events from a SampleNotification model and an expected PutEvents request
+    class SampleNotification(BaseModel):
+        message: str
+
+    event_bus_name = 'sample_bus'
+    event_source = 'test'
+
+    notification = SampleNotification(message='testing')
+    events = build_events_from_models(models=[notification], event_source=event_source)
+    event = events[0]
+
+    put_events_request = {
+        'Entries': [{
+            'Source': event_source,
+            'DetailType': event.metadata.event_name,
+            'Detail': event.model_dump_json(),
+            'EventBusName': event_bus_name
+        }]
+    }
+
+    put_events_response = {
+        'Entries': [{
+            'EventId': f'{uuid4()}',
+        }],
+        'FailedEntryCount': 0
+    }
+
+    # WHEN EventBridge receives a stubbed client and sends the event payload
+    client = boto3.client('events')
+    stubber = stub.Stubber(client)
+    stubber.add_response(method='put_events', expected_params=put_events_request, service_response=put_events_response)
+    stubber.activate()
+
+    event_provider = EventBridge(bus_name=event_bus_name, client=client)
+    event_provider.send(payload=events)
+
+    # THEN the stubbed client should send the events with no parameter validation errors
+    # and no runtime errors while manipulating the response
+
+    stubber.assert_no_pending_responses()
+    stubber.deactivate()
+
+
+def test_eventbridge_put_events_with_stubber_partial_failure():
+    # GIVEN a list of events from a SampleNotification model and an expected PutEvents request
+    class SampleNotification(BaseModel):
+        message: str
+
+    event_bus_name = 'sample_bus'
+    event_source = 'test'
+
+    notification = SampleNotification(message='testing')
+    events = build_events_from_models(models=[notification], event_source=event_source)
+    event = events[0]
+
+    expected_failure_count = 1
+    put_events_request = {
+        'Entries': [{
+            'Source': event_source,
+            'DetailType': event.metadata.event_name,
+            'Detail': event.model_dump_json(),
+            'EventBusName': event_bus_name
+        }]
+    }
+
+    put_events_response = {
+        'Entries': [
+            {
+                'EventId': f'{uuid4()}',
+            },
+            {
+                # https://docs.aws.amazon.com/eventbridge/latest/APIReference/API_PutEvents.html#API_PutEvents_Errors
+                'ErrorCode': 'InternalException',
+                'ErrorMessage': 'An internal error occurred'
+            }
+        ],
+        'FailedEntryCount': expected_failure_count
+    }
+
+    # WHEN EventBridge receives a stubbed client with at least one FailedEntryCount
+    client = boto3.client('events')
+    stubber = stub.Stubber(client)
+    stubber.add_response(method='put_events', expected_params=put_events_request, service_response=put_events_response)
+    stubber.activate()
+
+    event_provider = EventBridge(bus_name=event_bus_name, client=client)
+
+    with pytest.raises(ProductNotificationDeliveryError) as exc:
+        event_provider.send(payload=events)
+
+    # THEN we should receive a ProductNotificationDeliveryError along with its receipts
+    stubber.assert_no_pending_responses()
+    stubber.deactivate()
+
+    assert len(exc.value.receipts) == expected_failure_count
+
+
+def 
test_eventbridge_put_events_with_stubber_service_failure():
+    # GIVEN a list of events from a SampleNotification model
+    class SampleNotification(BaseModel):
+        message: str
+
+    event_bus_name = 'sample_bus'
+    event_source = 'test'
+
+    notification = SampleNotification(message='testing')
+    events = build_events_from_models(models=[notification], event_source=event_source)
+
+    # WHEN EventBridge receives a stubbed client that raises a ClientError for the entire PutEvents call
+    client = boto3.client('events')
+    stubber = stub.Stubber(client)
+    stubber.add_client_error(method='put_events', http_status_code=500, service_error_code='InternalException', service_message='Oops')
+    stubber.activate()
+
+    event_provider = EventBridge(bus_name=event_bus_name, client=client)
+
+    with pytest.raises(ProductNotificationDeliveryError) as exc:
+        event_provider.send(payload=events)
+
+    # THEN we should receive a ProductNotificationDeliveryError along with its receipts
+    stubber.assert_no_pending_responses()
+    stubber.deactivate()
+
+    assert len(exc.value.receipts) == 1
diff --git a/tests/unit/stream_processor/test_product_notification.py b/tests/unit/stream_processor/test_product_notification.py
index 0d1beba..a84a0d9 100644
--- a/tests/unit/stream_processor/test_product_notification.py
+++ b/tests/unit/stream_processor/test_product_notification.py
@@ -12,5 +12,5 @@ def test_product_notifications_are_emitted():
     receipt = notify_product_updates(update=product_notifications, event_handler=event_store)
 
     # THEN the fake event handler should emit these product notifications
-    assert len(receipt.successful_notifications) == len(product_notifications)
+    assert len(receipt.success) == len(product_notifications)
     assert all(notification in event_store for notification in product_notifications)

From 2d327bf579597afc26db2e496d7774ee4de90ff7 Mon Sep 17 00:00:00 2001
From: heitorlessa 
Date: Tue, 3 Oct 2023 21:20:25 +0200
Subject: [PATCH 37/58] chore: remove dead code

Signed-off-by: heitorlessa 
---
 product/models/products/product.py                  | 2 +-
 product/stream_processor/dal/events/models/input.py | 2 --
 2 files changed, 1 insertion(+), 3 deletions(-)

diff --git a/product/models/products/product.py b/product/models/products/product.py
index f530223..16f0ac3 100644
--- a/product/models/products/product.py
+++ b/product/models/products/product.py
@@ -20,4 +20,4 @@ class ProductChangeNotification(BaseModel):
     status: Literal['ADDED', 'REMOVED', 'UPDATED']
     created_at: datetime = Field(default_factory=datetime.utcnow)
 
-    __version__: str = 'V1'
+    __version__: str = 'v1'
diff --git a/product/stream_processor/dal/events/models/input.py b/product/stream_processor/dal/events/models/input.py
index bf52c45..68f1899 100644
--- a/product/stream_processor/dal/events/models/input.py
+++ b/product/stream_processor/dal/events/models/input.py
@@ -1,11 +1,9 @@
 from datetime import datetime
 from typing import TypeVar
 
-from aws_lambda_powertools import Logger
 from pydantic import BaseModel, ConfigDict, Field
 
 AnyModel = TypeVar('AnyModel', bound=BaseModel)
-logger = Logger()
 
 
 class EventMetadata(BaseModel):

From f2096013881da932e7d27c77bfa2c4c96377e10c Mon Sep 17 00:00:00 2001
From: heitorlessa 
Date: Wed, 4 Oct 2023 11:38:11 +0200
Subject: [PATCH 38/58] chore: chunk maximum allowed events

Signed-off-by: heitorlessa 
---
 product/stream_processor/dal/events/base.py   |  8 ++-
 .../stream_processor/dal/events/constants.py  |  1 +
 .../dal/events/providers/eventbridge.py       | 63 ++++++++++---------
 .../test_eventbridge_provider.py              | 40 +++++++++---
 4 files 
changed, 74 insertions(+), 38 deletions(-)

diff --git a/product/stream_processor/dal/events/base.py b/product/stream_processor/dal/events/base.py
index 76b7d59..9481404 100644
--- a/product/stream_processor/dal/events/base.py
+++ b/product/stream_processor/dal/events/base.py
@@ -1,6 +1,6 @@
 import re
 from abc import ABC, abstractmethod
-from typing import Any, Generic, Sequence, TypeVar
+from typing import Any, Generator, Generic, Sequence, TypeVar
 from uuid import uuid4
 
 from product.stream_processor.dal.events.constants import DEFAULT_EVENT_VERSION
@@ -63,3 +63,9 @@ def build_events_from_models(models: Sequence[AnyModel], event_source: str, meta
                                                correlation_id=correlation_id, **metadata)))
 
     return events
+
+
+def chunk_from_list(events: list[T], max_items: int) -> Generator[list[T], None, None]:
+    for idx in range(0, len(events), max_items):  # start, stop, step
+        # slice the first 10 items, then the next 10 items starting from the index
+        yield from [events[idx:idx + max_items]]
diff --git a/product/stream_processor/dal/events/constants.py b/product/stream_processor/dal/events/constants.py
index edbfc7b..6a7e067 100644
--- a/product/stream_processor/dal/events/constants.py
+++ b/product/stream_processor/dal/events/constants.py
@@ -1 +1,2 @@
 DEFAULT_EVENT_VERSION = 'v1'
+EVENTBRIDGE_PROVIDER_MAX_EVENTS_ENTRY = 10
diff --git a/product/stream_processor/dal/events/providers/eventbridge.py b/product/stream_processor/dal/events/providers/eventbridge.py
index a408870..29f63a4 100644
--- a/product/stream_processor/dal/events/providers/eventbridge.py
+++ b/product/stream_processor/dal/events/providers/eventbridge.py
@@ -1,11 +1,12 @@
 import os
-from typing import TYPE_CHECKING, Optional
+from typing import TYPE_CHECKING, Generator, Optional
 
 import boto3
 import botocore.exceptions
 
 from product.constants import XRAY_TRACE_ID_ENV
-from product.stream_processor.dal.events.base import EventProvider
+from product.stream_processor.dal.events.base import EventProvider, chunk_from_list
+from product.stream_processor.dal.events.constants import EVENTBRIDGE_PROVIDER_MAX_EVENTS_ENTRY
 from product.stream_processor.dal.events.exceptions import ProductNotificationDeliveryError
 from product.stream_processor.dal.events.models.input import Event
 from product.stream_processor.dal.events.models.output import EventReceipt, EventReceiptFail, EventReceiptSuccess
@@ -22,39 +23,45 @@ def __init__(self, bus_name: str, client: Optional['EventBridgeClient'] = None):
         self.client = client or boto3.client('events')
 
     def send(self, payload: list[Event]) -> EventReceipt:
-        events = self.build_put_events_request(payload)
+        success: list[EventReceiptSuccess] = []
+        failed: list[EventReceiptFail] = []
+        events = self.build_put_events_requests(payload)
+
+        for batch in events:
+            try:
+                result = self.client.put_events(Entries=batch)
+                ok, not_ok = self._collect_receipts(result)
+                success.extend(ok)
+                failed.extend(not_ok)
+            except botocore.exceptions.ClientError as exc:
+                error_message = exc.response['Error']['Message']
+
+                receipt = EventReceiptFail(receipt_id='', error=error_message, details=exc.response['ResponseMetadata'])
+                raise ProductNotificationDeliveryError(f'Failed to deliver all events: {error_message}', receipts=[receipt]) from exc
 
-        # NOTE: we need a generator that will slice up to 10 event entries
-        try:
-            result = self.client.put_events(Entries=events)
-        except botocore.exceptions.ClientError as exc:
-            error_message = exc.response['Error']['Message']
-
-            receipt = EventReceiptFail(receipt_id='', error=error_message, 
details=exc.response['ResponseMetadata']) - raise ProductNotificationDeliveryError(f'Failed to deliver all events: {error_message}', receipts=[receipt]) from exc - - success, failed = self._collect_receipts(result) return EventReceipt(success=success, failed=failed) - def build_put_events_request(self, payload: list[Event]) -> list['PutEventsRequestEntryTypeDef']: - events: list['PutEventsRequestEntryTypeDef'] = [] + def build_put_events_requests(self, payload: list[Event]) -> Generator[list['PutEventsRequestEntryTypeDef'], None, None]: + trace_id = os.environ.get(XRAY_TRACE_ID_ENV) + + for chunk in chunk_from_list(events=payload, max_items=EVENTBRIDGE_PROVIDER_MAX_EVENTS_ENTRY): + events: list['PutEventsRequestEntryTypeDef'] = [] - # 'Time' field is not included to be able to measure end-to-end latency later (time - created_at) - for event in payload: - trace_id = os.environ.get(XRAY_TRACE_ID_ENV) - event_request = { - 'Source': event.metadata.event_source, - 'DetailType': event.metadata.event_name, - 'Detail': event.model_dump_json(), - 'EventBusName': self.bus_name, - } + for event in chunk: + # 'Time' field is not included to be able to measure end-to-end latency later (time - created_at) + event_request = { + 'Source': event.metadata.event_source, + 'DetailType': event.metadata.event_name, + 'Detail': event.model_dump_json(), + 'EventBusName': self.bus_name, + } - if trace_id: - event_request['TraceHeader'] = trace_id + if trace_id: + event_request['TraceHeader'] = trace_id - events.append(event_request) + events.append(event_request) - return events + yield events @staticmethod def _collect_receipts(result: 'PutEventsResponseTypeDef') -> tuple[list[EventReceiptSuccess], list[EventReceiptFail]]: diff --git a/tests/unit/stream_processor/test_eventbridge_provider.py b/tests/unit/stream_processor/test_eventbridge_provider.py index a5be105..536a38c 100644 --- a/tests/unit/stream_processor/test_eventbridge_provider.py +++ b/tests/unit/stream_processor/test_eventbridge_provider.py @@ -3,11 +3,11 @@ import boto3 import pytest from botocore import stub -from botocore.exceptions import ClientError from pydantic import BaseModel from product.constants import XRAY_TRACE_ID_ENV from product.stream_processor.dal.events.base import build_events_from_models +from product.stream_processor.dal.events.constants import EVENTBRIDGE_PROVIDER_MAX_EVENTS_ENTRY from product.stream_processor.dal.events.exceptions import ProductNotificationDeliveryError from product.stream_processor.dal.events.providers.eventbridge import EventBridge @@ -24,16 +24,16 @@ class SampleNotification(BaseModel): # WHEN EventBridge provider builds a PutEvents request event_provider = EventBridge(bus_name='test_bus') - request = event_provider.build_put_events_request(payload=events) + requests = event_provider.build_put_events_requests(payload=events) # THEN EventBridge PutEvents request should match our metadata and model data - published_event = request[0] + request = next(requests)[0] event = events[0] - assert published_event['Source'] == event.metadata.event_source - assert published_event['Detail'] == event.model_dump_json() - assert published_event['DetailType'] == event.metadata.event_name - assert published_event['EventBusName'] == event_provider.bus_name + assert request['Source'] == event.metadata.event_source + assert request['Detail'] == event.model_dump_json() + assert request['DetailType'] == event.metadata.event_name + assert request['EventBusName'] == event_provider.bus_name def 
test_eventbridge_build_put_events_from_event_payload_include_trace_header(monkeypatch: pytest.MonkeyPatch):
@@ -52,13 +52,35 @@ class SampleNotification(BaseModel):
     event_provider = EventBridge(bus_name=event_bus_name)
 
     # WHEN EventBridge provider builds a PutEvents request
-    request = event_provider.build_put_events_request(payload=events)
+    requests = event_provider.build_put_events_requests(payload=events)
 
     # THEN PutEvents request should include 'TraceHeader' with the available X-Ray Trace ID
-    entry = request[0]
+    entry = next(requests)[0]
     assert entry['TraceHeader'] == trace_id
 
 
+def test_eventbridge_build_put_events_respect_max_entries_limit():
+    # GIVEN an even number of events for the EventBridge PutEvents API, above its limit of 10 entries per request
+    class SampleNotification(BaseModel):
+        message: str
+
+    number_of_events = 20
+
+    notifications = [SampleNotification(message='test') for _ in range(number_of_events)]
+    events = build_events_from_models(models=notifications, event_source='test')
+
+    # WHEN EventBridge provider builds a PutEvents request
+    requests = EventBridge(bus_name='test_bus').build_put_events_requests(payload=events)
+
+    # THEN we should have a generator yielding two batches at the maximum permitted number of entries (EVENTBRIDGE_PROVIDER_MAX_EVENTS_ENTRY)
+    first_batch = next(requests)
+    second_batch = next(requests)
+
+    assert len(first_batch) == EVENTBRIDGE_PROVIDER_MAX_EVENTS_ENTRY
+    assert len(second_batch) == EVENTBRIDGE_PROVIDER_MAX_EVENTS_ENTRY
+    assert len(list(requests)) == 0
+
+
 def test_eventbridge_put_events_with_stubber():
     # GIVEN a list of events from a SampleNotification model and an expected PutEvents request
     class SampleNotification(BaseModel):

From 6dc49549e02bb4041c47f453fd8f86d3fcf385f5 Mon Sep 17 00:00:00 2001
From: heitorlessa 
Date: Wed, 4 Oct 2023 12:16:14 +0200
Subject: [PATCH 39/58] chore: test chunk logic separately

Signed-off-by: heitorlessa 
---
 tests/unit/stream_processor/test_functions.py | 42 +++++++++++++++++++
 1 file changed, 42 insertions(+)
 create mode 100644 tests/unit/stream_processor/test_functions.py

diff --git a/tests/unit/stream_processor/test_functions.py b/tests/unit/stream_processor/test_functions.py
new file mode 100644
index 0000000..5b643e5
--- /dev/null
+++ b/tests/unit/stream_processor/test_functions.py
@@ -0,0 +1,42 @@
+from product.stream_processor.dal.events.base import chunk_from_list
+
+
+def test_chunk_from_list_returns_empty_list_when_list_is_empty():
+    # GIVEN an empty list of items and a chunk size of 3
+    list_of_items = []
+    chunk_size = 3
+    expected_chunk = []
+
+    # WHEN we call chunk_from_list
+    actual_chunk = chunk_from_list(list_of_items, chunk_size)
+
+    # THEN we get an empty chunk
+    assert actual_chunk == expected_chunk
+
+
+def test_chunk_from_list_returns_single_chunk_when_list_size_is_less_than_chunk_size():
+    # GIVEN a list of items and a chunk size of 3
+    list_of_items = [1, 2, 3]
+    chunk_size = 3
+    expected_chunk = [1, 2, 3]
+
+    # WHEN we call chunk_from_list
+    actual_chunk = next(chunk_from_list(list_of_items, chunk_size))
+
+    # THEN we get a chunk of the same size as the list
+    assert actual_chunk == expected_chunk
+    assert len(actual_chunk) == len(expected_chunk)
+    assert len(actual_chunk) == len(list_of_items)
+
+
+def test_chunk_from_list_returns_multiple_chunks_when_list_size_is_greater_than_chunk_size():
+    # GIVEN a list of items and a chunk size of 3
+    list_of_items = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
+    chunk_size = 3
+    expected_chunks = [[1, 2, 3], [4, 5, 6], [7, 8, 9], [10]]
+
+    # WHEN we call 
chunk_from_list + actual_chunks = list(chunk_from_list(list_of_items, chunk_size)) + + # THEN we get a chunk of the same size as the list + assert actual_chunks == expected_chunks From 86258ebb1a5ca3d4220cc516403edda58e3567e1 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Wed, 4 Oct 2023 12:16:22 +0200 Subject: [PATCH 40/58] chore: linting Signed-off-by: heitorlessa --- product/models/products/product.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/product/models/products/product.py b/product/models/products/product.py index 16f0ac3..6cca04a 100644 --- a/product/models/products/product.py +++ b/product/models/products/product.py @@ -1,5 +1,5 @@ from datetime import datetime -from typing import Annotated, ClassVar, Literal +from typing import Annotated, Literal from pydantic import BaseModel, Field, PositiveInt from pydantic.functional_validators import AfterValidator From bd714c6281217c430c1abb7a58d8877230788f70 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Wed, 4 Oct 2023 12:36:06 +0200 Subject: [PATCH 41/58] refactor: move standalones to functions.py; complete coverage Signed-off-by: heitorlessa --- product/stream_processor/dal/events/base.py | 43 +-------------- .../dal/events/event_handler.py | 3 +- .../stream_processor/dal/events/functions.py | 45 ++++++++++++++++ .../dal/events/providers/eventbridge.py | 3 +- tests/unit/stream_processor/conftest.py | 3 +- .../test_eventbridge_provider.py | 2 +- tests/unit/stream_processor/test_events.py | 2 +- tests/unit/stream_processor/test_functions.py | 52 ++++++++++++++++++- 8 files changed, 105 insertions(+), 48 deletions(-) create mode 100644 product/stream_processor/dal/events/functions.py diff --git a/product/stream_processor/dal/events/base.py b/product/stream_processor/dal/events/base.py index 9481404..a609c89 100644 --- a/product/stream_processor/dal/events/base.py +++ b/product/stream_processor/dal/events/base.py @@ -1,26 +1,17 @@ -import re from abc import ABC, abstractmethod -from typing import Any, Generator, Generic, Sequence, TypeVar -from uuid import uuid4 +from typing import Any, Generic, Sequence, TypeVar -from product.stream_processor.dal.events.constants import DEFAULT_EVENT_VERSION -from product.stream_processor.dal.events.models.input import AnyModel, Event, EventMetadata +from product.stream_processor.dal.events.models.input import Event from product.stream_processor.dal.events.models.output import EventReceipt T = TypeVar('T') # negative look ahead (?|char). Don't try to match the start of the string and any underscore that follows e.g., `_` and `__` -_exclude_underscores = r'(?!^)(? Product_Notification # ProductNotificationV2 -> Product_Notification_V2 # ProductHTTP -> Product_HTTP -_pascal_to_snake_pattern = re.compile(rf'({_exclude_underscores}{_pascal_case}{_or}{_followed_by_lower_case_or_digit}') class EventProvider(ABC): @@ -39,33 +30,3 @@ def __init__(self, provider: EventProvider, event_source: str) -> None: @abstractmethod def emit(self, payload: Sequence[T], metadata: dict[str, Any] | None = None) -> EventReceipt: ... 
- - -def convert_model_to_event_name(model_name: str) -> str: - """ Convert ModelName (pascal) to MODEL_NAME (snake, uppercase)""" - return _pascal_to_snake_pattern.sub(r'_\1', model_name).upper() - - -def build_events_from_models(models: Sequence[AnyModel], event_source: str, metadata: dict[str, Any] | None = None, - correlation_id: str = '') -> list[Event]: - metadata = metadata or {} - correlation_id = correlation_id or f'{uuid4()}' - - events = [] - - for model in models: - event_name = convert_model_to_event_name(model_name=model.__class__.__name__) - event_version = getattr(model, '__version__', DEFAULT_EVENT_VERSION) # defaults to v1 - - events.append( - Event( - data=model, metadata=EventMetadata(event_name=event_name, event_source=event_source, event_version=event_version, - correlation_id=correlation_id, **metadata))) - - return events - - -def chunk_from_list(events: list[T], max_items: int) -> Generator[list[T], None, None]: - for idx in range(0, len(events), max_items): # start, stop, step - # slice the first 10 items, then the next 10 items starting from the index - yield from [events[idx:idx + max_items]] diff --git a/product/stream_processor/dal/events/event_handler.py b/product/stream_processor/dal/events/event_handler.py index ae288db..b7adb19 100644 --- a/product/stream_processor/dal/events/event_handler.py +++ b/product/stream_processor/dal/events/event_handler.py @@ -1,7 +1,8 @@ from typing import Any from product.models.products.product import ProductChangeNotification -from product.stream_processor.dal.events.base import EventHandler, EventProvider, build_events_from_models +from product.stream_processor.dal.events.base import EventHandler, EventProvider +from product.stream_processor.dal.events.functions import build_events_from_models from product.stream_processor.dal.events.models.output import EventReceipt diff --git a/product/stream_processor/dal/events/functions.py b/product/stream_processor/dal/events/functions.py new file mode 100644 index 0000000..fae2649 --- /dev/null +++ b/product/stream_processor/dal/events/functions.py @@ -0,0 +1,45 @@ +import re +from typing import Sequence, Any, Generator, TypeVar +from uuid import uuid4 + +from product.stream_processor.dal.events.constants import DEFAULT_EVENT_VERSION +from product.stream_processor.dal.events.models.input import AnyModel, Event, EventMetadata + +T = TypeVar('T') + +_exclude_underscores = r'(?!^)(? 
str: + """ Convert ModelName (pascal) to MODEL_NAME (snake, uppercase)""" + return _pascal_to_snake_pattern.sub(r'_\1', model_name).upper() + + +def build_events_from_models(models: Sequence[AnyModel], event_source: str, metadata: dict[str, Any] | None = None, + correlation_id: str = '') -> list[Event]: + metadata = metadata or {} + correlation_id = correlation_id or f'{uuid4()}' + + events = [] + + for model in models: + event_name = convert_model_to_event_name(model_name=model.__class__.__name__) + event_version = getattr(model, '__version__', DEFAULT_EVENT_VERSION) # defaults to v1 + + events.append( + Event( + data=model, + metadata=EventMetadata(event_name=event_name, event_source=event_source, event_version=event_version, + correlation_id=correlation_id, **metadata))) + + return events + + +def chunk_from_list(events: list[T], max_items: int) -> Generator[list[T], None, None]: + for idx in range(0, len(events), max_items): # start, stop, step + # slice the first 10 items, then the next 10 items starting from the index + yield from [events[idx:idx + max_items]] diff --git a/product/stream_processor/dal/events/providers/eventbridge.py b/product/stream_processor/dal/events/providers/eventbridge.py index 29f63a4..22d6063 100644 --- a/product/stream_processor/dal/events/providers/eventbridge.py +++ b/product/stream_processor/dal/events/providers/eventbridge.py @@ -5,7 +5,8 @@ import botocore.exceptions from product.constants import XRAY_TRACE_ID_ENV -from product.stream_processor.dal.events.base import EventProvider, chunk_from_list +from product.stream_processor.dal.events.base import EventProvider +from product.stream_processor.dal.events.functions import chunk_from_list from product.stream_processor.dal.events.constants import EVENTBRIDGE_PROVIDER_MAX_EVENTS_ENTRY from product.stream_processor.dal.events.exceptions import ProductNotificationDeliveryError from product.stream_processor.dal.events.models.input import Event diff --git a/tests/unit/stream_processor/conftest.py b/tests/unit/stream_processor/conftest.py index ad89cf4..e93cd2b 100644 --- a/tests/unit/stream_processor/conftest.py +++ b/tests/unit/stream_processor/conftest.py @@ -3,7 +3,8 @@ from pytest_socket import disable_socket from product.models.products.product import ProductChangeNotification -from product.stream_processor.dal.events.base import EventProvider, build_events_from_models +from product.stream_processor.dal.events.base import EventProvider +from product.stream_processor.dal.events.functions import build_events_from_models from product.stream_processor.dal.events.event_handler import ProductChangeNotificationHandler from product.stream_processor.dal.events.models.input import Event from product.stream_processor.dal.events.models.output import EventReceipt, EventReceiptSuccess diff --git a/tests/unit/stream_processor/test_eventbridge_provider.py b/tests/unit/stream_processor/test_eventbridge_provider.py index 536a38c..c006481 100644 --- a/tests/unit/stream_processor/test_eventbridge_provider.py +++ b/tests/unit/stream_processor/test_eventbridge_provider.py @@ -6,7 +6,7 @@ from pydantic import BaseModel from product.constants import XRAY_TRACE_ID_ENV -from product.stream_processor.dal.events.base import build_events_from_models +from product.stream_processor.dal.events.functions import build_events_from_models from product.stream_processor.dal.events.constants import EVENTBRIDGE_PROVIDER_MAX_EVENTS_ENTRY from product.stream_processor.dal.events.exceptions import ProductNotificationDeliveryError from 
product.stream_processor.dal.events.providers.eventbridge import EventBridge diff --git a/tests/unit/stream_processor/test_events.py b/tests/unit/stream_processor/test_events.py index 5a3fd2b..7048089 100644 --- a/tests/unit/stream_processor/test_events.py +++ b/tests/unit/stream_processor/test_events.py @@ -2,7 +2,7 @@ from pydantic import BaseModel -from product.stream_processor.dal.events.base import build_events_from_models, convert_model_to_event_name +from product.stream_processor.dal.events.functions import convert_model_to_event_name, build_events_from_models from product.stream_processor.dal.events.constants import DEFAULT_EVENT_VERSION diff --git a/tests/unit/stream_processor/test_functions.py b/tests/unit/stream_processor/test_functions.py index 5b643e5..49b4415 100644 --- a/tests/unit/stream_processor/test_functions.py +++ b/tests/unit/stream_processor/test_functions.py @@ -1,4 +1,7 @@ -from product.stream_processor.dal.events.base import chunk_from_list +from product.stream_processor.dal.events.functions import chunk_from_list, convert_model_to_event_name, build_events_from_models +from pydantic import BaseModel + +from product.stream_processor.dal.events.models.input import Event def test_chunk_from_list_returns_empty_list_when_list_is_empty(): @@ -11,7 +14,7 @@ def test_chunk_from_list_returns_empty_list_when_list_is_empty(): actual_chunk = chunk_from_list(list_of_items, chunk_size) # THEN we get an empty chunk - assert actual_chunk == expected_chunk + assert list(actual_chunk) == expected_chunk def test_chunk_from_list_returns_single_chunk_when_list_size_is_less_than_chunk_size(): @@ -40,3 +43,48 @@ def test_chunk_from_list_returns_multiple_chunks_when_list_size_is_greater_than_ # THEN we get a chunk of the same size as the list assert actual_chunks == expected_chunks + + +def test_convert_pascal_case_to_snake_case_with_convert_model_to_event_name(): + # GIVEN a model name in pascal case + model_name = 'ProductNotification' + + # WHEN we call convert_model_to_event_name + event_name = convert_model_to_event_name(model_name) + + # THEN we get the expected event name + assert event_name == 'product_notification'.upper() + +def test_convert_model_to_event_name_with_uppercase(): + # GIVEN a model name in pascal case + model_name = 'ProductHTTPNotification' + + # WHEN we call convert_model_to_event_name + event_name = convert_model_to_event_name(model_name) + + # THEN we get the expected event name + assert event_name == 'product_http_notification'.upper() + + +def test_convert_model_to_event_name_with_numbers(): + # GIVEN a model name in pascal case + model_name = 'ProductHTTPNotification123' + + # WHEN we call convert_model_to_event_name + event_name = convert_model_to_event_name(model_name) + + # THEN we get the expected event name + assert event_name == 'product_http_notification123'.upper() + + +def test_build_events_from_models(): + # GIVEN any Pydantic model + class SampleNotification(BaseModel): + message: str + + # WHEN we call build_events_from_models with all required fields + notification = SampleNotification(message='Hello World!') + event = build_events_from_models(models=[notification], event_source='sample') + + # THEN we get a list of Events + assert type(event[0]) == Event From c8a5b55dcc5038a1b30a6648dcfca57e0e74da7e Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Wed, 4 Oct 2023 12:41:53 +0200 Subject: [PATCH 42/58] refactor: move standalones to functions.py; complete coverage Signed-off-by: heitorlessa --- product/stream_processor/dal/events/functions.py | 
7 +++---- .../stream_processor/dal/events/providers/eventbridge.py | 2 +- .../stream_processor/domain_logic/product_notification.py | 3 ++- tests/unit/stream_processor/conftest.py | 2 +- tests/unit/stream_processor/test_eventbridge_provider.py | 2 +- tests/unit/stream_processor/test_events.py | 2 +- tests/unit/stream_processor/test_functions.py | 3 ++- 7 files changed, 11 insertions(+), 10 deletions(-) diff --git a/product/stream_processor/dal/events/functions.py b/product/stream_processor/dal/events/functions.py index fae2649..84ff0d0 100644 --- a/product/stream_processor/dal/events/functions.py +++ b/product/stream_processor/dal/events/functions.py @@ -1,5 +1,5 @@ import re -from typing import Sequence, Any, Generator, TypeVar +from typing import Any, Generator, Sequence, TypeVar from uuid import uuid4 from product.stream_processor.dal.events.constants import DEFAULT_EVENT_VERSION @@ -32,9 +32,8 @@ def build_events_from_models(models: Sequence[AnyModel], event_source: str, meta events.append( Event( - data=model, - metadata=EventMetadata(event_name=event_name, event_source=event_source, event_version=event_version, - correlation_id=correlation_id, **metadata))) + data=model, metadata=EventMetadata(event_name=event_name, event_source=event_source, event_version=event_version, + correlation_id=correlation_id, **metadata))) return events diff --git a/product/stream_processor/dal/events/providers/eventbridge.py b/product/stream_processor/dal/events/providers/eventbridge.py index 22d6063..3e6a99d 100644 --- a/product/stream_processor/dal/events/providers/eventbridge.py +++ b/product/stream_processor/dal/events/providers/eventbridge.py @@ -6,9 +6,9 @@ from product.constants import XRAY_TRACE_ID_ENV from product.stream_processor.dal.events.base import EventProvider -from product.stream_processor.dal.events.functions import chunk_from_list from product.stream_processor.dal.events.constants import EVENTBRIDGE_PROVIDER_MAX_EVENTS_ENTRY from product.stream_processor.dal.events.exceptions import ProductNotificationDeliveryError +from product.stream_processor.dal.events.functions import chunk_from_list from product.stream_processor.dal.events.models.input import Event from product.stream_processor.dal.events.models.output import EventReceipt, EventReceiptFail, EventReceiptSuccess diff --git a/product/stream_processor/domain_logic/product_notification.py b/product/stream_processor/domain_logic/product_notification.py index 3f0a03a..d2ecd7b 100644 --- a/product/stream_processor/domain_logic/product_notification.py +++ b/product/stream_processor/domain_logic/product_notification.py @@ -5,10 +5,11 @@ from product.stream_processor.dal.events.providers.eventbridge import EventBridge EVENT_BUS = os.environ.get('EVENT_BUS', '') +EVENT_SOURCE = 'myorg.product.product_notification' def notify_product_updates(update: list[ProductChangeNotification], event_handler: ProductChangeNotificationHandler | None = None): if event_handler is None: - event_handler = ProductChangeNotificationHandler(provider=EventBridge(EVENT_BUS)) + event_handler = ProductChangeNotificationHandler(provider=EventBridge(EVENT_BUS), event_source=EVENT_SOURCE) return event_handler.emit(payload=update) diff --git a/tests/unit/stream_processor/conftest.py b/tests/unit/stream_processor/conftest.py index e93cd2b..48e9bb1 100644 --- a/tests/unit/stream_processor/conftest.py +++ b/tests/unit/stream_processor/conftest.py @@ -4,8 +4,8 @@ from product.models.products.product import ProductChangeNotification from 
product.stream_processor.dal.events.base import EventProvider -from product.stream_processor.dal.events.functions import build_events_from_models from product.stream_processor.dal.events.event_handler import ProductChangeNotificationHandler +from product.stream_processor.dal.events.functions import build_events_from_models from product.stream_processor.dal.events.models.input import Event from product.stream_processor.dal.events.models.output import EventReceipt, EventReceiptSuccess diff --git a/tests/unit/stream_processor/test_eventbridge_provider.py b/tests/unit/stream_processor/test_eventbridge_provider.py index c006481..df8d1a9 100644 --- a/tests/unit/stream_processor/test_eventbridge_provider.py +++ b/tests/unit/stream_processor/test_eventbridge_provider.py @@ -6,9 +6,9 @@ from pydantic import BaseModel from product.constants import XRAY_TRACE_ID_ENV -from product.stream_processor.dal.events.functions import build_events_from_models from product.stream_processor.dal.events.constants import EVENTBRIDGE_PROVIDER_MAX_EVENTS_ENTRY from product.stream_processor.dal.events.exceptions import ProductNotificationDeliveryError +from product.stream_processor.dal.events.functions import build_events_from_models from product.stream_processor.dal.events.providers.eventbridge import EventBridge diff --git a/tests/unit/stream_processor/test_events.py b/tests/unit/stream_processor/test_events.py index 7048089..2b58b4b 100644 --- a/tests/unit/stream_processor/test_events.py +++ b/tests/unit/stream_processor/test_events.py @@ -2,8 +2,8 @@ from pydantic import BaseModel -from product.stream_processor.dal.events.functions import convert_model_to_event_name, build_events_from_models from product.stream_processor.dal.events.constants import DEFAULT_EVENT_VERSION +from product.stream_processor.dal.events.functions import build_events_from_models, convert_model_to_event_name def test_model_to_standard_event(): diff --git a/tests/unit/stream_processor/test_functions.py b/tests/unit/stream_processor/test_functions.py index 49b4415..dfd78a6 100644 --- a/tests/unit/stream_processor/test_functions.py +++ b/tests/unit/stream_processor/test_functions.py @@ -1,6 +1,6 @@ -from product.stream_processor.dal.events.functions import chunk_from_list, convert_model_to_event_name, build_events_from_models from pydantic import BaseModel +from product.stream_processor.dal.events.functions import build_events_from_models, chunk_from_list, convert_model_to_event_name from product.stream_processor.dal.events.models.input import Event @@ -55,6 +55,7 @@ def test_convert_pascal_case_to_snake_case_with_convert_model_to_event_name(): # THEN we get the expected event name assert event_name == 'product_notification'.upper() + def test_convert_model_to_event_name_with_uppercase(): # GIVEN a model name in pascal case model_name = 'ProductHTTPNotification' From 7e219ed38ca10e907a01fd88ca9a44627dfadea0 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Wed, 4 Oct 2023 14:17:42 +0200 Subject: [PATCH 43/58] fix(mypy): narrow typing Signed-off-by: heitorlessa --- product/stream_processor/dal/events/base.py | 6 +++--- product/stream_processor/dal/events/event_handler.py | 3 ++- product/stream_processor/dal/events/models/input.py | 4 ++-- tests/unit/stream_processor/conftest.py | 5 +++-- 4 files changed, 10 insertions(+), 8 deletions(-) diff --git a/product/stream_processor/dal/events/base.py b/product/stream_processor/dal/events/base.py index a609c89..eb52a40 100644 --- a/product/stream_processor/dal/events/base.py +++ 
b/product/stream_processor/dal/events/base.py @@ -1,5 +1,5 @@ from abc import ABC, abstractmethod -from typing import Any, Generic, Sequence, TypeVar +from typing import Any, Generic, TypeVar from product.stream_processor.dal.events.models.input import Event from product.stream_processor.dal.events.models.output import EventReceipt @@ -17,7 +17,7 @@ class EventProvider(ABC): @abstractmethod - def send(self, payload: Sequence[Event]) -> EventReceipt: + def send(self, payload: list[Event]) -> EventReceipt: ... @@ -28,5 +28,5 @@ def __init__(self, provider: EventProvider, event_source: str) -> None: self.event_source = event_source @abstractmethod - def emit(self, payload: Sequence[T], metadata: dict[str, Any] | None = None) -> EventReceipt: + def emit(self, payload: list[T], metadata: dict[str, Any] | None = None, correlation_id='') -> EventReceipt: ... diff --git a/product/stream_processor/dal/events/event_handler.py b/product/stream_processor/dal/events/event_handler.py index b7adb19..298ff9c 100644 --- a/product/stream_processor/dal/events/event_handler.py +++ b/product/stream_processor/dal/events/event_handler.py @@ -11,6 +11,7 @@ class ProductChangeNotificationHandler(EventHandler): def __init__(self, provider: EventProvider, event_source: str) -> None: super().__init__(provider=provider, event_source=event_source) - def emit(self, payload: list[ProductChangeNotification], metadata: dict[str, Any] | None = None, correlation_id: str = '') -> EventReceipt: + def emit(self, payload: list[ProductChangeNotification], metadata: dict[str, Any] | None = None, + correlation_id='') -> EventReceipt: event_payload = build_events_from_models(models=payload, metadata=metadata, correlation_id=correlation_id, event_source=self.event_source) return self.provider.send(payload=event_payload) diff --git a/product/stream_processor/dal/events/models/input.py b/product/stream_processor/dal/events/models/input.py index 68f1899..f010287 100644 --- a/product/stream_processor/dal/events/models/input.py +++ b/product/stream_processor/dal/events/models/input.py @@ -1,5 +1,5 @@ from datetime import datetime -from typing import TypeVar +from typing import TypeVar, Generic from pydantic import BaseModel, ConfigDict, Field @@ -16,6 +16,6 @@ class EventMetadata(BaseModel): model_config = ConfigDict(extra='allow') -class Event(BaseModel): +class Event(BaseModel, Generic[AnyModel]): data: AnyModel metadata: EventMetadata diff --git a/tests/unit/stream_processor/conftest.py b/tests/unit/stream_processor/conftest.py index 48e9bb1..9a015a6 100644 --- a/tests/unit/stream_processor/conftest.py +++ b/tests/unit/stream_processor/conftest.py @@ -35,9 +35,10 @@ class FakeEventHandler(ProductChangeNotificationHandler): def __init__(self, provider: EventProvider = FakeProvider(), event_source: str = 'fake') -> None: super().__init__(provider=provider, event_source=event_source) - self.published_payloads: list[ProductChangeNotificationHandler] = [] + self.published_payloads: list[ProductChangeNotification] = [] - def emit(self, payload: list[ProductChangeNotification], metadata: dict[str, Any] | None = None, correlation_id: str = '') -> EventReceipt: + def emit(self, payload: list[ProductChangeNotification], metadata: dict[str, Any] | None = None, + correlation_id='') -> EventReceipt: metadata = metadata or {} event_payload = build_events_from_models(models=payload, metadata=metadata, correlation_id=correlation_id, event_source='fake') receipt = self.provider.send(payload=event_payload) From 0112d0cd80ddaf99c383a18b014654a8ca0ee80d 
From: heitorlessa
Date: Wed, 4 Oct 2023 14:17:54 +0200
Subject: [PATCH 44/58] chore: enable pydantic plugin for mypy

Signed-off-by: heitorlessa

---
 pyproject.toml | 8 ++++++++
 1 file changed, 8 insertions(+)

diff --git a/pyproject.toml b/pyproject.toml
index 6383955..677d2f0 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -81,3 +81,11 @@ ignore_patterns = [".git", ".venv", ".build", "cdk.out", "node_modules"]
 
 [tool.pytest.ini_options]
 testpaths = "tests"
+
+[tool.mypy]
+plugins = "pydantic.mypy"
+
+[tool.pydantic-mypy]
+init_forbid_extra = true
+init_typed = true
+warn_required_dynamic_aliases = true

From 24206bd31fed3ccb5a458cf970a2701b9b83c5cc Mon Sep 17 00:00:00 2001
From: heitorlessa
Date: Wed, 4 Oct 2023 14:19:16 +0200
Subject: [PATCH 45/58] chore: explicit typed dict type as mypy can't infer

Signed-off-by: heitorlessa

---
 product/stream_processor/dal/events/providers/eventbridge.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/product/stream_processor/dal/events/providers/eventbridge.py b/product/stream_processor/dal/events/providers/eventbridge.py
index 3e6a99d..7efb6db 100644
--- a/product/stream_processor/dal/events/providers/eventbridge.py
+++ b/product/stream_processor/dal/events/providers/eventbridge.py
@@ -50,7 +50,7 @@ def build_put_events_requests(self, payload: list[Event]) -> Generator[list['Pu
         for event in chunk:
             # 'Time' field is not included to be able to measure end-to-end latency later (time - created_at)
-            event_request = {
+            event_request: 'PutEventsRequestEntryTypeDef' = {
                 'Source': event.metadata.event_source,
                 'DetailType': event.metadata.event_name,
                 'Detail': event.model_dump_json(),

From c6e8c4873bbf4b04b6e1fe5f380af6f8dd32620f Mon Sep 17 00:00:00 2001
From: heitorlessa
Date: Wed, 4 Oct 2023 14:28:18 +0200
Subject: [PATCH 46/58] chore: explicit type as mypy can't infer

Signed-off-by: heitorlessa

---
 product/stream_processor/dal/events/functions.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/product/stream_processor/dal/events/functions.py b/product/stream_processor/dal/events/functions.py
index 84ff0d0..981b824 100644
--- a/product/stream_processor/dal/events/functions.py
+++ b/product/stream_processor/dal/events/functions.py
@@ -24,7 +24,7 @@ def build_events_from_models(models: Sequence[AnyModel], event_source: str, meta
     metadata = metadata or {}
     correlation_id = correlation_id or f'{uuid4()}'
 
-    events = []
+    events: list[Event] = []
 
     for model in models:
         event_name = convert_model_to_event_name(model_name=model.__class__.__name__)

From 4a0aaef802315602ef48b0681801cd3c6af24b56 Mon Sep 17 00:00:00 2001
From: heitorlessa
Date: Wed, 4 Oct 2023 14:29:02 +0200
Subject: [PATCH 47/58] chore: actually apply pydantic plugin; skip pytest_socket missing py.typed

Signed-off-by: heitorlessa

---
 mypy.ini       | 8 ++++++++
 pyproject.toml | 8 --------
 2 files changed, 8 insertions(+), 8 deletions(-)

diff --git a/mypy.ini b/mypy.ini
index b8a390f..a85c9e7 100644
--- a/mypy.ini
+++ b/mypy.ini
@@ -7,7 +7,12 @@ warn_unused_ignores=True
 show_column_numbers = True
 show_error_codes = True
 show_error_context = True
+plugins = pydantic.mypy
 
+[pydantic-mypy]
+init_forbid_extra = true
+init_typed = true
+warn_required_dynamic_aliases = true
 
 # Disable specific error codes in the 'tests' package
 [mypy-tests.*]
@@ -67,3 +72,6 @@
 
 [mypy-setuptools]
 ignore_missing_imports = True
+
+[mypy-pytest_socket]
+ignore_missing_imports = True
diff --git a/pyproject.toml b/pyproject.toml
index 677d2f0..6383955 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -81,11 +81,3 @@ ignore_patterns = [".git", ".venv", ".build", "cdk.out", "node_modules"]
 
 [tool.pytest.ini_options]
 testpaths = "tests"
-
-[tool.mypy]
-plugins = "pydantic.mypy"
-
-[tool.pydantic-mypy]
-init_forbid_extra = true
-init_typed = true
-warn_required_dynamic_aliases = true

From cd157e9e2805ea9b304950ff9afa875b3033651e Mon Sep 17 00:00:00 2001
From: heitorlessa
Date: Wed, 4 Oct 2023 14:33:17 +0200
Subject: [PATCH 48/58] chore: make pr fixes

Signed-off-by: heitorlessa

---
 product/stream_processor/dal/events/event_handler.py | 3 +--
 product/stream_processor/dal/events/models/input.py  | 2 +-
 tests/unit/stream_processor/conftest.py              | 3 +--
 tests/unit/stream_processor/test_functions.py        | 2 +-
 4 files changed, 4 insertions(+), 6 deletions(-)

diff --git a/product/stream_processor/dal/events/event_handler.py b/product/stream_processor/dal/events/event_handler.py
index 298ff9c..6ad01ae 100644
--- a/product/stream_processor/dal/events/event_handler.py
+++ b/product/stream_processor/dal/events/event_handler.py
@@ -11,7 +11,6 @@ class ProductChangeNotificationHandler(EventHandler):
     def __init__(self, provider: EventProvider, event_source: str) -> None:
         super().__init__(provider=provider, event_source=event_source)
 
-    def emit(self, payload: list[ProductChangeNotification], metadata: dict[str, Any] | None = None,
-             correlation_id='') -> EventReceipt:
+    def emit(self, payload: list[ProductChangeNotification], metadata: dict[str, Any] | None = None, correlation_id='') -> EventReceipt:
         event_payload = build_events_from_models(models=payload, metadata=metadata, correlation_id=correlation_id, event_source=self.event_source)
         return self.provider.send(payload=event_payload)
diff --git a/product/stream_processor/dal/events/models/input.py b/product/stream_processor/dal/events/models/input.py
index f010287..3be7fb7 100644
--- a/product/stream_processor/dal/events/models/input.py
+++ b/product/stream_processor/dal/events/models/input.py
@@ -1,5 +1,5 @@
 from datetime import datetime
-from typing import TypeVar, Generic
+from typing import Generic, TypeVar
 
 from pydantic import BaseModel, ConfigDict, Field
 
diff --git a/tests/unit/stream_processor/conftest.py b/tests/unit/stream_processor/conftest.py
index 9a015a6..b903c57 100644
--- a/tests/unit/stream_processor/conftest.py
+++ b/tests/unit/stream_processor/conftest.py
@@ -37,8 +37,7 @@ def __init__(self, provider: EventProvider = FakeProvider(), event_source: str =
         super().__init__(provider=provider, event_source=event_source)
         self.published_payloads: list[ProductChangeNotification] = []
 
-    def emit(self, payload: list[ProductChangeNotification], metadata: dict[str, Any] | None = None,
-             correlation_id='') -> EventReceipt:
+    def emit(self, payload: list[ProductChangeNotification], metadata: dict[str, Any] | None = None, correlation_id='') -> EventReceipt:
         metadata = metadata or {}
         event_payload = build_events_from_models(models=payload, metadata=metadata, correlation_id=correlation_id, event_source='fake')
         receipt = self.provider.send(payload=event_payload)
diff --git a/tests/unit/stream_processor/test_functions.py b/tests/unit/stream_processor/test_functions.py
index dfd78a6..6fc6b22 100644
--- a/tests/unit/stream_processor/test_functions.py
+++ b/tests/unit/stream_processor/test_functions.py
@@ -88,4 +88,4 @@ class SampleNotification(BaseModel):
     event = build_events_from_models(models=[notification], event_source='sample')
 
     # THEN we get a list of Events
-    assert type(event[0]) == Event
+    assert type(event[0]) is Event
From fd003d7f6fc136b5813a188986e1bd911d9155eb Mon Sep 17 00:00:00 2001
From: heitorlessa
Date: Wed, 4 Oct 2023 14:42:50 +0200
Subject: [PATCH 49/58] refactor: rename dal to integrations

Signed-off-by: heitorlessa

---
 .../domain_logic/product_notification.py             |  4 ++--
 product/stream_processor/handlers/process_stream.py  |  2 +-
 .../{dal => integrations}/__init__.py                |  0
 .../{dal => integrations}/events/__init__.py         |  0
 .../{dal => integrations}/events/base.py             |  4 ++--
 .../{dal => integrations}/events/constants.py        |  0
 .../{dal => integrations}/events/event_handler.py    |  6 +++---
 .../{dal => integrations}/events/exceptions.py       |  2 +-
 .../{dal => integrations}/events/functions.py        |  4 ++--
 .../{dal => integrations}/events/models/__init__.py  |  0
 .../{dal => integrations}/events/models/input.py     |  0
 .../{dal => integrations}/events/models/output.py    |  0
 .../events/providers/__init__.py                     |  0
 .../events/providers/eventbridge.py                  | 12 ++++++------
 tests/unit/stream_processor/conftest.py              | 10 +++++-----
 .../stream_processor/test_eventbridge_provider.py    |  8 ++++----
 tests/unit/stream_processor/test_events.py           |  4 ++--
 tests/unit/stream_processor/test_functions.py        |  4 ++--
 18 files changed, 30 insertions(+), 30 deletions(-)
 rename product/stream_processor/{dal => integrations}/__init__.py (100%)
 rename product/stream_processor/{dal => integrations}/events/__init__.py (100%)
 rename product/stream_processor/{dal => integrations}/events/base.py (84%)
 rename product/stream_processor/{dal => integrations}/events/constants.py (100%)
 rename product/stream_processor/{dal => integrations}/events/event_handler.py (70%)
 rename product/stream_processor/{dal => integrations}/events/exceptions.py (71%)
 rename product/stream_processor/{dal => integrations}/events/functions.py (89%)
 rename product/stream_processor/{dal => integrations}/events/models/__init__.py (100%)
 rename product/stream_processor/{dal => integrations}/events/models/input.py (100%)
 rename product/stream_processor/{dal => integrations}/events/models/output.py (100%)
 rename product/stream_processor/{dal => integrations}/events/providers/__init__.py (100%)
 rename product/stream_processor/{dal => integrations}/events/providers/eventbridge.py (85%)

diff --git a/product/stream_processor/domain_logic/product_notification.py b/product/stream_processor/domain_logic/product_notification.py
index d2ecd7b..6d08d88 100644
--- a/product/stream_processor/domain_logic/product_notification.py
+++ b/product/stream_processor/domain_logic/product_notification.py
@@ -1,8 +1,8 @@
 import os
 
 from product.models.products.product import ProductChangeNotification
-from product.stream_processor.dal.events.event_handler import ProductChangeNotificationHandler
-from product.stream_processor.dal.events.providers.eventbridge import EventBridge
+from product.stream_processor.integrations.events.event_handler import ProductChangeNotificationHandler
+from product.stream_processor.integrations.events.providers.eventbridge import EventBridge
 
 EVENT_BUS = os.environ.get('EVENT_BUS', '')
 EVENT_SOURCE = 'myorg.product.product_notification'
diff --git a/product/stream_processor/handlers/process_stream.py b/product/stream_processor/handlers/process_stream.py
index 599979e..0f5f747 100644
--- a/product/stream_processor/handlers/process_stream.py
+++ b/product/stream_processor/handlers/process_stream.py
@@ -5,8 +5,8 @@
 from aws_lambda_powertools.utilities.typing import LambdaContext
 
 from product.models.products.product import ProductChangeNotification
-from product.stream_processor.dal.events.event_handler import ProductChangeNotificationHandler
 from product.stream_processor.domain_logic.product_notification import notify_product_updates
+from product.stream_processor.integrations.events.event_handler import ProductChangeNotificationHandler
 
 logger = Logger()
diff --git a/product/stream_processor/dal/__init__.py b/product/stream_processor/integrations/__init__.py
similarity index 100%
rename from product/stream_processor/dal/__init__.py
rename to product/stream_processor/integrations/__init__.py
diff --git a/product/stream_processor/dal/events/__init__.py b/product/stream_processor/integrations/events/__init__.py
similarity index 100%
rename from product/stream_processor/dal/events/__init__.py
rename to product/stream_processor/integrations/events/__init__.py
diff --git a/product/stream_processor/dal/events/base.py b/product/stream_processor/integrations/events/base.py
similarity index 84%
rename from product/stream_processor/dal/events/base.py
rename to product/stream_processor/integrations/events/base.py
index eb52a40..d7f1873 100644
--- a/product/stream_processor/dal/events/base.py
+++ b/product/stream_processor/integrations/events/base.py
@@ -1,8 +1,8 @@
 from abc import ABC, abstractmethod
 from typing import Any, Generic, TypeVar
 
-from product.stream_processor.dal.events.models.input import Event
-from product.stream_processor.dal.events.models.output import EventReceipt
+from product.stream_processor.integrations.events.models.input import Event
+from product.stream_processor.integrations.events.models.output import EventReceipt
 
 T = TypeVar('T')
 
diff --git a/product/stream_processor/dal/events/constants.py b/product/stream_processor/integrations/events/constants.py
similarity index 100%
rename from product/stream_processor/dal/events/constants.py
rename to product/stream_processor/integrations/events/constants.py
diff --git a/product/stream_processor/dal/events/event_handler.py b/product/stream_processor/integrations/events/event_handler.py
similarity index 70%
rename from product/stream_processor/dal/events/event_handler.py
rename to product/stream_processor/integrations/events/event_handler.py
index 6ad01ae..feca1e7 100644
--- a/product/stream_processor/dal/events/event_handler.py
+++ b/product/stream_processor/integrations/events/event_handler.py
@@ -1,9 +1,9 @@
 from typing import Any
 
 from product.models.products.product import ProductChangeNotification
-from product.stream_processor.dal.events.base import EventHandler, EventProvider
-from product.stream_processor.dal.events.functions import build_events_from_models
-from product.stream_processor.dal.events.models.output import EventReceipt
+from product.stream_processor.integrations.events.base import EventHandler, EventProvider
+from product.stream_processor.integrations.events.functions import build_events_from_models
+from product.stream_processor.integrations.events.models.output import EventReceipt
 
 
 class ProductChangeNotificationHandler(EventHandler):
diff --git a/product/stream_processor/dal/events/exceptions.py b/product/stream_processor/integrations/events/exceptions.py
similarity index 71%
rename from product/stream_processor/dal/events/exceptions.py
rename to product/stream_processor/integrations/events/exceptions.py
index c77a153..1e641d5 100644
--- a/product/stream_processor/dal/events/exceptions.py
+++ b/product/stream_processor/integrations/events/exceptions.py
@@ -1,4 +1,4 @@
-from product.stream_processor.dal.events.models.output import EventReceiptFail
+from product.stream_processor.integrations.events.models.output import EventReceiptFail
 
 
 class ProductNotificationDeliveryError(Exception):
diff --git a/product/stream_processor/dal/events/functions.py b/product/stream_processor/integrations/events/functions.py
similarity index 89%
rename from product/stream_processor/dal/events/functions.py
rename to product/stream_processor/integrations/events/functions.py
index 981b824..e4baa03 100644
--- a/product/stream_processor/dal/events/functions.py
+++ b/product/stream_processor/integrations/events/functions.py
@@ -2,8 +2,8 @@
 from typing import Any, Generator, Sequence, TypeVar
 from uuid import uuid4
 
-from product.stream_processor.dal.events.constants import DEFAULT_EVENT_VERSION
-from product.stream_processor.dal.events.models.input import AnyModel, Event, EventMetadata
+from product.stream_processor.integrations.events.constants import DEFAULT_EVENT_VERSION
+from product.stream_processor.integrations.events.models.input import AnyModel, Event, EventMetadata
 
 T = TypeVar('T')
 
diff --git a/product/stream_processor/dal/events/models/__init__.py b/product/stream_processor/integrations/events/models/__init__.py
similarity index 100%
rename from product/stream_processor/dal/events/models/__init__.py
rename to product/stream_processor/integrations/events/models/__init__.py
diff --git a/product/stream_processor/dal/events/models/input.py b/product/stream_processor/integrations/events/models/input.py
similarity index 100%
rename from product/stream_processor/dal/events/models/input.py
rename to product/stream_processor/integrations/events/models/input.py
diff --git a/product/stream_processor/dal/events/models/output.py b/product/stream_processor/integrations/events/models/output.py
similarity index 100%
rename from product/stream_processor/dal/events/models/output.py
rename to product/stream_processor/integrations/events/models/output.py
diff --git a/product/stream_processor/dal/events/providers/__init__.py b/product/stream_processor/integrations/events/providers/__init__.py
similarity index 100%
rename from product/stream_processor/dal/events/providers/__init__.py
rename to product/stream_processor/integrations/events/providers/__init__.py
diff --git a/product/stream_processor/dal/events/providers/eventbridge.py b/product/stream_processor/integrations/events/providers/eventbridge.py
similarity index 85%
rename from product/stream_processor/dal/events/providers/eventbridge.py
rename to product/stream_processor/integrations/events/providers/eventbridge.py
index 7efb6db..a84bc81 100644
--- a/product/stream_processor/dal/events/providers/eventbridge.py
+++ b/product/stream_processor/integrations/events/providers/eventbridge.py
@@ -5,12 +5,12 @@
 import botocore.exceptions
 
 from product.constants import XRAY_TRACE_ID_ENV
-from product.stream_processor.dal.events.base import EventProvider
-from product.stream_processor.dal.events.constants import EVENTBRIDGE_PROVIDER_MAX_EVENTS_ENTRY
-from product.stream_processor.dal.events.exceptions import ProductNotificationDeliveryError
-from product.stream_processor.dal.events.functions import chunk_from_list
-from product.stream_processor.dal.events.models.input import Event
-from product.stream_processor.dal.events.models.output import EventReceipt, EventReceiptFail, EventReceiptSuccess
+from product.stream_processor.integrations.events.base import EventProvider
+from product.stream_processor.integrations.events.constants import EVENTBRIDGE_PROVIDER_MAX_EVENTS_ENTRY
+from product.stream_processor.integrations.events.exceptions import ProductNotificationDeliveryError
+from product.stream_processor.integrations.events.functions import chunk_from_list
+from product.stream_processor.integrations.events.models.input import Event
+from product.stream_processor.integrations.events.models.output import EventReceipt, EventReceiptFail, EventReceiptSuccess
 
 if TYPE_CHECKING:
     from mypy_boto3_events import EventBridgeClient
diff --git a/tests/unit/stream_processor/conftest.py b/tests/unit/stream_processor/conftest.py
index b903c57..2fb598f 100644
--- a/tests/unit/stream_processor/conftest.py
+++ b/tests/unit/stream_processor/conftest.py
@@ -3,11 +3,11 @@
 from pytest_socket import disable_socket
 
 from product.models.products.product import ProductChangeNotification
-from product.stream_processor.dal.events.base import EventProvider
-from product.stream_processor.dal.events.event_handler import ProductChangeNotificationHandler
-from product.stream_processor.dal.events.functions import build_events_from_models
-from product.stream_processor.dal.events.models.input import Event
-from product.stream_processor.dal.events.models.output import EventReceipt, EventReceiptSuccess
+from product.stream_processor.integrations.events.base import EventProvider
+from product.stream_processor.integrations.events.event_handler import ProductChangeNotificationHandler
+from product.stream_processor.integrations.events.functions import build_events_from_models
+from product.stream_processor.integrations.events.models.input import Event
+from product.stream_processor.integrations.events.models.output import EventReceipt, EventReceiptSuccess
 
 
 def pytest_runtest_setup():
diff --git a/tests/unit/stream_processor/test_eventbridge_provider.py b/tests/unit/stream_processor/test_eventbridge_provider.py
index df8d1a9..face696 100644
--- a/tests/unit/stream_processor/test_eventbridge_provider.py
+++ b/tests/unit/stream_processor/test_eventbridge_provider.py
@@ -6,10 +6,10 @@
 from pydantic import BaseModel
 
 from product.constants import XRAY_TRACE_ID_ENV
-from product.stream_processor.dal.events.constants import EVENTBRIDGE_PROVIDER_MAX_EVENTS_ENTRY
-from product.stream_processor.dal.events.exceptions import ProductNotificationDeliveryError
-from product.stream_processor.dal.events.functions import build_events_from_models
-from product.stream_processor.dal.events.providers.eventbridge import EventBridge
+from product.stream_processor.integrations.events.constants import EVENTBRIDGE_PROVIDER_MAX_EVENTS_ENTRY
+from product.stream_processor.integrations.events.exceptions import ProductNotificationDeliveryError
+from product.stream_processor.integrations.events.functions import build_events_from_models
+from product.stream_processor.integrations.events.providers.eventbridge import EventBridge
 
 
 def test_eventbridge_build_put_events_from_event_payload():
diff --git a/tests/unit/stream_processor/test_events.py b/tests/unit/stream_processor/test_events.py
index 2b58b4b..d2d4f0c 100644
--- a/tests/unit/stream_processor/test_events.py
+++ b/tests/unit/stream_processor/test_events.py
@@ -2,8 +2,8 @@
 
 from pydantic import BaseModel
 
-from product.stream_processor.dal.events.constants import DEFAULT_EVENT_VERSION
-from product.stream_processor.dal.events.functions import build_events_from_models, convert_model_to_event_name
+from product.stream_processor.integrations.events.constants import DEFAULT_EVENT_VERSION
+from product.stream_processor.integrations.events.functions import build_events_from_models, convert_model_to_event_name
 
 
 def test_model_to_standard_event():
diff --git a/tests/unit/stream_processor/test_functions.py b/tests/unit/stream_processor/test_functions.py
index 6fc6b22..4533eed 100644
--- a/tests/unit/stream_processor/test_functions.py
+++ b/tests/unit/stream_processor/test_functions.py
@@ -1,7 +1,7 @@
 from pydantic import BaseModel
 
-from product.stream_processor.dal.events.functions import build_events_from_models, chunk_from_list, convert_model_to_event_name
-from product.stream_processor.dal.events.models.input import Event
+from product.stream_processor.integrations.events.functions import build_events_from_models, chunk_from_list, convert_model_to_event_name
+from product.stream_processor.integrations.events.models.input import Event
 
 
 def test_chunk_from_list_returns_empty_list_when_list_is_empty():

From 1841f95f7a90c68177b28f2df06385790a49715d Mon Sep 17 00:00:00 2001
From: heitorlessa
Date: Wed, 4 Oct 2023 15:46:34 +0200
Subject: [PATCH 50/58] docs(domain): add initial docstrings

Signed-off-by: heitorlessa

---
 .../domain_logic/product_notification.py | 33 ++++++++++++++++++-
 1 file changed, 32 insertions(+), 1 deletion(-)

diff --git a/product/stream_processor/domain_logic/product_notification.py b/product/stream_processor/domain_logic/product_notification.py
index 6d08d88..40c3a66 100644
--- a/product/stream_processor/domain_logic/product_notification.py
+++ b/product/stream_processor/domain_logic/product_notification.py
@@ -2,13 +2,44 @@
 
 from product.models.products.product import ProductChangeNotification
 from product.stream_processor.integrations.events.event_handler import ProductChangeNotificationHandler
+from product.stream_processor.integrations.events.models.output import EventReceipt
 from product.stream_processor.integrations.events.providers.eventbridge import EventBridge
 
 EVENT_BUS = os.environ.get('EVENT_BUS', '')
 EVENT_SOURCE = 'myorg.product.product_notification'
 
 
-def notify_product_updates(update: list[ProductChangeNotification], event_handler: ProductChangeNotificationHandler | None = None):
+def notify_product_updates(update: list[ProductChangeNotification], event_handler: ProductChangeNotificationHandler | None = None) -> EventReceipt:
+    """Publish product change notifications using the default or a provided event handler.
+
+    Parameters
+    ----------
+    update : list[ProductChangeNotification]
+        List of product change notifications to publish.
+    event_handler : ProductChangeNotificationHandler | None, optional
+        Event handler to use for publishing, by default ProductChangeNotificationHandler
+
+    Environment variables
+    ---------------------
+    `EVENT_BUS` : Event Bus to notify product change notifications
+
+    Examples
+    --------
+
+    **Sending a newly added product notification**
+
+    ```python
+    from product.stream_processor.domain_logic.product_notification import notify_product_updates
+
+    notification = ProductChangeNotification(product_id=product_id, status="ADDED")
+    receipt = notify_product_updates(update=[notification])
+    ```
+
+    Returns
+    -------
+    EventReceipt
+        Receipts for unsuccessfully and successfully published events.
+    """
     if event_handler is None:
         event_handler = ProductChangeNotificationHandler(provider=EventBridge(EVENT_BUS), event_source=EVENT_SOURCE)

From 9fcf867eb78fd2714978dd7246d8b2552a6c20f0 Mon Sep 17 00:00:00 2001
From: heitorlessa
Date: Wed, 4 Oct 2023 15:47:07 +0200
Subject: [PATCH 51/58] chore: add mkdocs and mkdocstrings for documentation

Signed-off-by: heitorlessa

---
 docs/api/stream_processor.md |  4 ++
 mkdocs.yml                   | 94 ++++++++++++++++++++++++++++++++++++
 poetry.lock                  | 70 ++++++++++++++++++++++++++-
 pyproject.toml               |  2 +
 4 files changed, 169 insertions(+), 1 deletion(-)
 create mode 100644 docs/api/stream_processor.md
 create mode 100644 mkdocs.yml

diff --git a/docs/api/stream_processor.md b/docs/api/stream_processor.md
new file mode 100644
index 0000000..5a3e522
--- /dev/null
+++ b/docs/api/stream_processor.md
@@ -0,0 +1,4 @@
+
+## Domain logic
+
+::: product.stream_processor.domain_logic.product_notification
diff --git a/mkdocs.yml b/mkdocs.yml
new file mode 100644
index 0000000..abe0e6c
--- /dev/null
+++ b/mkdocs.yml
@@ -0,0 +1,94 @@
+site_name: Serverless Python Demo OPN305
+site_description: Serverless Python Demo for re:Invent OPN305 session
+site_author: Ran Isenberg and Heitor Lessa
+repo_url: https://github.com/ran-isenberg/serverless-python-demo
+edit_uri: edit/main/docs
+
+nav:
+  - Homepage:
+      - Decision log: decision_log.md
+      - API reference:
+          - Stream Processor: api/stream_processor.md
+
+theme:
+  name: material
+  font:
+    text: Ubuntu
+  palette:
+    - scheme: default
+      primary: deep purple
+      toggle:
+        icon: material/lightbulb
+        name: Switch to dark mode
+    - scheme: slate
+      primary: indigo
+      accent: teal
+      toggle:
+        icon: material/lightbulb-outline
+        name: Switch to light mode
+  features:
+    - header.autohide
+    - navigation.sections
+    - navigation.top
+    - navigation.instant
+    - navigation.indexes
+    - navigation.tracking
+    - navigation.tabs
+    - content.code.annotate
+    - content.code.copy
+  icon:
+    repo: fontawesome/brands/github
+
+markdown_extensions:
+  - admonition
+  - abbr
+  - pymdownx.tabbed:
+      alternate_style: true
+  - pymdownx.highlight:
+      linenums: true
+  - pymdownx.details
+  - pymdownx.snippets:
+      base_path: "."
+      check_paths: true
+      restrict_base_path: false
+  - meta
+  - toc:
+      permalink: true
+      toc_depth: 4
+  - attr_list
+  - pymdownx.emoji:
+      emoji_index: !!python/name:materialx.emoji.twemoji
+      emoji_generator: !!python/name:materialx.emoji.to_svg
+  - pymdownx.inlinehilite
+  - pymdownx.superfences:
+      custom_fences:
+        - name: mermaid
+          class: mermaid
+          format: !!python/name:pymdownx.superfences.fence_code_format
+  - pymdownx.tasklist:
+      custom_checkbox: true
+
+copyright: Copyright © 2023 Ran Isenberg and Heitor Lessa
+
+plugins:
+  - search
+  - mkdocstrings:
+      handlers:
+        python:
+          options:
+            docstring_style: numpy
+            docstring_section_style: spacy
+            show_source: true
+            heading_level: 3
+            allow_inspection: true
+            group_by_category: true
+            show_category_heading: true
+            show_bases: true
+            show_docstring_examples: true
+
+extra_css:
+  - stylesheets/extra.css
+
+watch:
+  - product
+  - docs
diff --git a/poetry.lock b/poetry.lock
index c7a72d8..b412084 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -604,6 +604,20 @@ gitdb = ">=4.0.1,<5"
 [package.extras]
 test = ["black", "coverage[toml]", "ddt (>=1.1.1,!=1.4.3)", "mypy", "pre-commit", "pytest", "pytest-cov", "pytest-sugar"]
 
+[[package]]
+name = "griffe"
+version = "0.36.4"
+description = "Signatures for entire Python programs. Extract the structure, the frame, the skeleton of your project, to generate API documentation or find breaking changes in your API."
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "griffe-0.36.4-py3-none-any.whl", hash = "sha256:4e37a723891fa774fafdd67240571801a1d90d0236562c178707e5c37fb3ebe2"},
+    {file = "griffe-0.36.4.tar.gz", hash = "sha256:7b5968f5cc6446637ed0d3ded9de07d6a928f10ccb24116b1dd843635bf1994a"},
+]
+
+[package.dependencies]
+colorama = ">=0.4"
+
 [[package]]
 name = "identify"
 version = "2.5.29"
@@ -895,6 +909,21 @@ watchdog = ">=2.0"
 i18n = ["babel (>=2.9.0)"]
 min-versions = ["babel (==2.9.0)", "click (==7.0)", "colorama (==0.4)", "ghp-import (==1.0)", "importlib-metadata (==4.3)", "jinja2 (==2.11.1)", "markdown (==3.2.1)", "markupsafe (==2.0.1)", "mergedeep (==1.3.4)", "packaging (==20.5)", "pathspec (==0.11.1)", "platformdirs (==2.2.0)", "pyyaml (==5.1)", "pyyaml-env-tag (==0.1)", "typing-extensions (==3.10)", "watchdog (==2.0)"]
 
+[[package]]
+name = "mkdocs-autorefs"
+version = "0.5.0"
+description = "Automatically link across pages in MkDocs."
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "mkdocs_autorefs-0.5.0-py3-none-any.whl", hash = "sha256:7930fcb8ac1249f10e683967aeaddc0af49d90702af111a5e390e8b20b3d97ff"},
+    {file = "mkdocs_autorefs-0.5.0.tar.gz", hash = "sha256:9a5054a94c08d28855cfab967ada10ed5be76e2bfad642302a610b252c3274c0"},
+]
+
+[package.dependencies]
+Markdown = ">=3.3"
+mkdocs = ">=1.1"
+
 [[package]]
 name = "mkdocs-git-revision-date-plugin"
 version = "0.3.2"
@@ -950,6 +979,45 @@ files = [
     {file = "mkdocs_material_extensions-1.2.tar.gz", hash = "sha256:27e2d1ed2d031426a6e10d5ea06989d67e90bb02acd588bc5673106b5ee5eedf"},
 ]
 
+[[package]]
+name = "mkdocstrings"
+version = "0.23.0"
+description = "Automatic documentation from sources, for MkDocs."
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "mkdocstrings-0.23.0-py3-none-any.whl", hash = "sha256:051fa4014dfcd9ed90254ae91de2dbb4f24e166347dae7be9a997fe16316c65e"},
+    {file = "mkdocstrings-0.23.0.tar.gz", hash = "sha256:d9c6a37ffbe7c14a7a54ef1258c70b8d394e6a33a1c80832bce40b9567138d1c"},
+]
+
+[package.dependencies]
+Jinja2 = ">=2.11.1"
+Markdown = ">=3.3"
+MarkupSafe = ">=1.1"
+mkdocs = ">=1.2"
+mkdocs-autorefs = ">=0.3.1"
+pymdown-extensions = ">=6.3"
+
+[package.extras]
+crystal = ["mkdocstrings-crystal (>=0.3.4)"]
+python = ["mkdocstrings-python (>=0.5.2)"]
+python-legacy = ["mkdocstrings-python-legacy (>=0.2.1)"]
+
+[[package]]
+name = "mkdocstrings-python"
+version = "1.7.1"
+description = "A Python handler for mkdocstrings."
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "mkdocstrings_python-1.7.1-py3-none-any.whl", hash = "sha256:cb1651fba8423324b861fe38ce881cf56f30738770a2119f007a0a4ffcb00777"},
+    {file = "mkdocstrings_python-1.7.1.tar.gz", hash = "sha256:90d838dc7861674794e3ca79f64c23c5d8fa76b9aa29db834b246771964c0881"},
+]
+
+[package.dependencies]
+griffe = ">=0.35"
+mkdocstrings = ">=0.20"
+
 [[package]]
 name = "mypy"
 version = "1.5.1"
@@ -2011,4 +2079,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p
 [metadata]
 lock-version = "2.0"
 python-versions = "^3.11.0"
-content-hash = "50159324a6c0b3a5e03ac842690fda7c227cf3a4c5f1704a07861ab8a9fd0861"
+content-hash = "0f1dc94f77a850ad98d56e2188e26dc8e2837e9b1a856e1b6dd796b07dc2663d"
diff --git a/pyproject.toml b/pyproject.toml
index 6383955..2265c16 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -58,6 +58,8 @@ toml = "*"
 [tool.poetry.group.dev.dependencies]
 mypy-boto3-events = "^1.28.46"
 pytest-socket = "^0.6.0"
+mkdocstrings = "^0.23.0"
+mkdocstrings-python = "^1.7.1"
 
 [tool.isort]
 py_version = 311

From 9c17f07a7ab9336a26227ed304109adb4f4d5115 Mon Sep 17 00:00:00 2001
From: heitorlessa
Date: Wed, 4 Oct 2023 16:14:41 +0200
Subject: [PATCH 52/58] docs(handlers): add docstring for process_stream

Signed-off-by: heitorlessa

---
 docs/api/stream_processor.md   | 10 +++++-
 .../handlers/process_stream.py | 33 ++++++++++++++++++-
 2 files changed, 41 insertions(+), 2 deletions(-)

diff --git a/docs/api/stream_processor.md b/docs/api/stream_processor.md
index 5a3e522..b0a846e 100644
--- a/docs/api/stream_processor.md
+++ b/docs/api/stream_processor.md
@@ -1,4 +1,12 @@
 
-## Domain logic
+## Product notification
+
+### Domain logic
+
+Domain logic to notify product changes, e.g., `ADDED`, `REMOVED`, `UPDATED`.
 
 ::: product.stream_processor.domain_logic.product_notification
+
+### Lambda Handlers
+
+::: product.stream_processor.handlers.process_stream
diff --git a/product/stream_processor/handlers/process_stream.py b/product/stream_processor/handlers/process_stream.py
index 0f5f747..d9bcaf5 100644
--- a/product/stream_processor/handlers/process_stream.py
+++ b/product/stream_processor/handlers/process_stream.py
@@ -7,6 +7,7 @@
 from product.models.products.product import ProductChangeNotification
 from product.stream_processor.domain_logic.product_notification import notify_product_updates
 from product.stream_processor.integrations.events.event_handler import ProductChangeNotificationHandler
+from product.stream_processor.integrations.events.models.output import EventReceipt
 
 logger = Logger()
 
@@ -16,7 +17,37 @@ def process_stream(
     event: dict[str, Any],
     context: LambdaContext,
     event_handler: ProductChangeNotificationHandler | None = None,
-) -> list[ProductChangeNotification]:
+) -> EventReceipt:
+    """Process batch of Amazon DynamoDB Stream containing product changes.
+
+    Parameters
+    ----------
+    event : dict[str, Any]
+        DynamoDB Stream event.
+
+        See [sample](https://docs.aws.amazon.com/lambda/latest/dg/with-ddb.html#events-sample-dynamodb)
+    context : LambdaContext
+        Lambda Context object.
+
+        It is used to enrich our structured logging via Powertools for AWS Lambda.
+
+        See [sample](https://docs.aws.amazon.com/lambda/latest/dg/python-context.html)
+    event_handler : ProductChangeNotificationHandler | None, optional
+        Event Handler to use to notify product changes, by default `ProductChangeNotificationHandler`
+
+    Returns
+    -------
+    EventReceipt
+        Receipts for unsuccessfully and successfully published events.
+
+    Raises
+    ------
+
+    ProductNotificationDeliveryError
+        Partial or total failures when sending notifications. It allows the stream to stop at the exact same sequence number.
+
+        This means notifications are delivered at least once.
+    """
     # Until we create our handler product stream change input
     stream_records = DynamoDBStreamEvent(event)

From e1f6476db53121ee94e09d34826eb5d94a255dd9 Mon Sep 17 00:00:00 2001
From: heitorlessa
Date: Wed, 4 Oct 2023 16:15:04 +0200
Subject: [PATCH 53/58] docs(domain): use markdown to create anchor

Signed-off-by: heitorlessa

---
 .../stream_processor/domain_logic/product_notification.py | 5 ++---
 1 file changed, 2 insertions(+), 3 deletions(-)

diff --git a/product/stream_processor/domain_logic/product_notification.py b/product/stream_processor/domain_logic/product_notification.py
index 40c3a66..adc198a 100644
--- a/product/stream_processor/domain_logic/product_notification.py
+++ b/product/stream_processor/domain_logic/product_notification.py
@@ -23,10 +23,9 @@ def notify_product_updates(update: list[ProductChangeNotification], event_handle
     ---------------------
     `EVENT_BUS` : Event Bus to notify product change notifications
 
-    Examples
-    --------
+    # Examples
 
-    **Sending a newly added product notification**
+    Sending a newly added product notification
 
     ```python
     from product.stream_processor.domain_logic.product_notification import notify_product_updates

From be733bc6874bd0cfe6da8aa6fb6f5ea599b4a5a1 Mon Sep 17 00:00:00 2001
From: heitorlessa
Date: Wed, 4 Oct 2023 16:15:20 +0200
Subject: [PATCH 54/58] docs: increase indentation to improve nav

Signed-off-by: heitorlessa

---
 mkdocs.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/mkdocs.yml b/mkdocs.yml
index abe0e6c..133a868 100644
--- a/mkdocs.yml
+++ b/mkdocs.yml
@@ -79,7 +79,7 @@ plugins:
             docstring_style: numpy
             docstring_section_style: spacy
             show_source: true
-            heading_level: 3
+            heading_level: 4
             allow_inspection: true
             group_by_category: true
             show_category_heading: true

From 6cc898c0a5db75e662b0586329bd79e757fb5a43 Mon Sep 17 00:00:00 2001
From: heitorlessa
Date: Wed, 4 Oct 2023 16:21:39 +0200
Subject: [PATCH 55/58] docs(handler): add integrations section

Signed-off-by: heitorlessa

---
 product/stream_processor/handlers/process_stream.py | 8 ++++++++
 1 file changed, 8 insertions(+)

diff --git a/product/stream_processor/handlers/process_stream.py b/product/stream_processor/handlers/process_stream.py
index d9bcaf5..74849fd 100644
--- a/product/stream_processor/handlers/process_stream.py
+++ b/product/stream_processor/handlers/process_stream.py
@@ -20,6 +20,7 @@ def process_stream(
 ) -> EventReceipt:
     """Process batch of Amazon DynamoDB Stream containing product changes.
+
 
     Parameters
     ----------
     event : dict[str, Any]
         DynamoDB Stream event.
@@ -36,6 +37,13 @@ def process_stream(
     event_handler : ProductChangeNotificationHandler | None, optional
         Event Handler to use to notify product changes, by default `ProductChangeNotificationHandler`
 
+    Integrations
+    ------------
+
+    # Domain
+
+    * `notify_product_updates` to notify `ProductChangeNotification` changes
+
     Returns
     -------
     EventReceipt

From bddc7b4977922ff8a0742dd1b365763dc28382e5 Mon Sep 17 00:00:00 2001
From: heitorlessa
Date: Wed, 4 Oct 2023 16:24:10 +0200
Subject: [PATCH 56/58] docs(domain): add integration section

Signed-off-by: heitorlessa

---
 .../stream_processor/domain_logic/product_notification.py | 7 +++++++
 1 file changed, 7 insertions(+)

diff --git a/product/stream_processor/domain_logic/product_notification.py b/product/stream_processor/domain_logic/product_notification.py
index adc198a..7306003 100644
--- a/product/stream_processor/domain_logic/product_notification.py
+++ b/product/stream_processor/domain_logic/product_notification.py
@@ -34,6 +34,13 @@ def notify_product_updates(update: list[ProductChangeNotification], event_handle
     receipt = notify_product_updates(update=[notification])
     ```
 
+    Integrations
+    ------------
+
+    # Events
+
+    * `ProductChangeNotificationHandler` uses `EventBridge` provider to convert and publish `ProductChangeNotification` models into events.
+
     Returns
     -------
     EventReceipt

From 0d5728b5054dc4fb291bb0a1a1f0a907e74b4b35 Mon Sep 17 00:00:00 2001
From: heitorlessa
Date: Wed, 4 Oct 2023 16:44:57 +0200
Subject: [PATCH 57/58] docs(models): add Pydantic models

Signed-off-by: heitorlessa

---
 docs/api/product_models.md            |  7 +++++++
 mkdocs.yml                            |  5 +++++
 product/models/products/validators.py | 17 +++++++++++++++++
 3 files changed, 29 insertions(+)
 create mode 100644 docs/api/product_models.md

diff --git a/docs/api/product_models.md b/docs/api/product_models.md
new file mode 100644
index 0000000..a2b7194
--- /dev/null
+++ b/docs/api/product_models.md
@@ -0,0 +1,7 @@
+## Product models
+
+::: product.models.products.product
+
+## Validators
+
+::: product.models.products.validators
diff --git a/mkdocs.yml b/mkdocs.yml
index 133a868..7e5f85e 100644
--- a/mkdocs.yml
+++ b/mkdocs.yml
@@ -9,6 +9,7 @@ nav:
       - Decision log: decision_log.md
       - API reference:
           - Stream Processor: api/stream_processor.md
+          - Product models: api/product_models.md
 
 theme:
   name: material
@@ -85,6 +86,10 @@ plugins:
             show_category_heading: true
             show_bases: true
             show_docstring_examples: true
+            show_if_no_docstring: true
+            merge_init_into_class: true # avoid Class params + __init__ params
+            separate_signature: false
+            show_signature_annotations: false
 
 extra_css:
   - stylesheets/extra.css
diff --git a/product/models/products/validators.py b/product/models/products/validators.py
index 5c5ae49..6130f0f 100644
--- a/product/models/products/validators.py
+++ b/product/models/products/validators.py
@@ -2,6 +2,23 @@
 
 
 def validate_product_id(product_id: str) -> str:
+    """Validate that a product ID is a valid UUID.
+
+    Parameters
+    ----------
+    product_id : str
+        Product ID as a string
+
+    Returns
+    -------
+    str
+        Validated product ID value
+
+    Raises
+    ------
+    ValueError
+        When a product ID doesn't conform to the UUID spec.
+    """
     try:
         UUID(product_id, version=4)
     except Exception as exc:

From 5ce39a355a8030dfbdcfcd769f4737d7d964130f Mon Sep 17 00:00:00 2001
From: heitorlessa
Date: Wed, 4 Oct 2023 16:45:23 +0200
Subject: [PATCH 58/58] docs(stream_processor): handlers first

Signed-off-by: heitorlessa

---
 docs/api/stream_processor.md | 12 ++++++++----
 1 file changed, 8 insertions(+), 4 deletions(-)

diff --git a/docs/api/stream_processor.md b/docs/api/stream_processor.md
index b0a846e..8f4010e 100644
--- a/docs/api/stream_processor.md
+++ b/docs/api/stream_processor.md
@@ -1,12 +1,16 @@
 
 ## Product notification
 
+### Lambda Handlers
+
+The process stream handler is connected to an Amazon DynamoDB Stream that captures product changes in the product table.
+
+We convert each record into a `ProductChangeNotification` model depending on the DynamoDB Stream event name (e.g., `INSERT` -> `ADDED`).
+
+::: product.stream_processor.handlers.process_stream
+
 ### Domain logic
 
 Domain logic to notify product changes, e.g., `ADDED`, `REMOVED`, `UPDATED`.
 
 ::: product.stream_processor.domain_logic.product_notification
-
-### Lambda Handlers
-
-### ::: product.stream_processor.handlers.process_stream
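Taken together, these patches describe one flow: a DynamoDB Stream batch enters `process_stream`, is converted into `ProductChangeNotification` models, and `notify_product_updates` publishes them through `ProductChangeNotificationHandler` and the `EventBridge` provider. The sketch below is illustrative and not part of the patch series: it assumes the `FakeEventHandler` test double from `tests/unit/stream_processor/conftest.py` behaves as its diff suggests (returns an `EventReceipt` without network calls), and the random `product_id` is sample data only.

```python
from uuid import uuid4

from product.models.products.product import ProductChangeNotification
from product.stream_processor.domain_logic.product_notification import notify_product_updates
from tests.unit.stream_processor.conftest import FakeEventHandler

# Simulate a product change as the stream processor would model it
# (construction mirrors the docstring example from PATCH 50).
notification = ProductChangeNotification(product_id=f'{uuid4()}', status='ADDED')

# Inject the fake handler so nothing reaches EventBridge; omitting
# event_handler falls back to ProductChangeNotificationHandler wired
# to the EventBridge provider (the default path from PATCH 50).
handler = FakeEventHandler()
receipt = notify_product_updates(update=[notification], event_handler=handler)

print(receipt)  # EventReceipt with the published notification receipts
```

The mypy settings from PATCH 44/47 also change how such constructor calls are checked. With `plugins = pydantic.mypy`, `init_typed`, and `init_forbid_extra` active, pydantic model instantiation is validated statically; a hypothetical model (not from this codebase) shows the kind of error each option surfaces:

```python
from pydantic import BaseModel


class Sample(BaseModel):  # hypothetical model, for illustration only
    product_id: str


Sample(product_id=123)       # mypy error via init_typed: "int" is not "str" (pydantic v2 also rejects this at runtime)
Sample(product_id='x', y=1)  # mypy error via init_forbid_extra: unexpected keyword argument "y"
```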