From 4e78f97cf7869959d69f2b338b084ba4feda1268 Mon Sep 17 00:00:00 2001 From: stewartwallace Date: Thu, 22 Apr 2021 17:57:20 +0100 Subject: [PATCH 1/7] Introducing Wave Feature to Pipelines Adding in tests for Target/Target Structure Updating User Guide updating tests --- docs/user-guide.md | 8 + .../cdk/cdk_stacks/adf_default_pipeline.py | 128 +++++---- .../tests/test_default_pipeline_type.py | 244 ++++++++++++++++++ .../shared/cdk/generate_pipeline_inputs.py | 4 +- .../adf-build/shared/schema_validation.py | 8 +- .../adf-build/shared/target.py | 19 +- .../adf-build/shared/tests/test_target.py | 241 ++++++++++++++--- 7 files changed, 552 insertions(+), 100 deletions(-) create mode 100644 src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_stacks/tests/test_default_pipeline_type.py diff --git a/docs/user-guide.md b/docs/user-guide.md index e0a569a52..503b9163c 100644 --- a/docs/user-guide.md +++ b/docs/user-guide.md @@ -187,6 +187,14 @@ targets: name: production_step provider: ... properties: ... + - path: /my_ou/production/some_path + regions: [eu-central-1, us-west-1] + name: another_step + wave_config: + size: 30 # (Optional) This forces the pipeline to split this OU into seperate stages, each stage containing up to X accounts + exclude: + - 9999999999 # (Optional) List of accounts to exclude from this target. Currently only supports account Ids + properties: ... ``` ### Params diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_stacks/adf_default_pipeline.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_stacks/adf_default_pipeline.py index 6ca6889c3..b43ec583f 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_stacks/adf_default_pipeline.py +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_stacks/adf_default_pipeline.py @@ -86,7 +86,7 @@ def generate_targets_for_pipeline(_stages, scope, stack_input): for index, targets in enumerate( stack_input["input"].get("environments", {}).get("targets", []) ): - _actions = [] + top_level_deployment_type = ( stack_input["input"] .get("default_providers", {}) @@ -102,74 +102,72 @@ def generate_targets_for_pipeline(_stages, scope, stack_input): .get("action", "") ) - for target in targets: - target_stage_override = target.get("provider") or top_level_deployment_type - if target.get("name") == "approval" or target.get("provider", "") == "approval": - _actions.extend( - [ - adf_codepipeline.Action( - name="{0}".format(target["name"]), - provider="Manual", - category="Approval", - target=target, - run_order=1, - map_params=stack_input["input"], - action_name="{0}".format(target["name"]), - ).config - ] - ) - continue + for wave_index, wave in enumerate(targets): + _actions = [] + _is_approval = ( + wave[0].get("name", "").startswith("approval") + or wave[0].get("provider", "") == "approval" + ) + _action_type_name = "approval" if _is_approval else "deployment" + _stage_name = ( + # 0th Index since step names are for entire stages not + # per target. + f"{wave[0].get('step_name')}-{wave_index}" + if wave[0].get("step_name") else f"{_action_type_name}-stage-{index + 1}-wave-{wave_index}" + ) - if "codebuild" in target_stage_override: - _actions.extend( - [ - adf_codebuild.CodeBuild( - scope, - # Use the name of the pipeline for CodeBuild - # instead of the target name as it will always - # operate from the deployment account. 
- "{pipeline_name}-stage-{index}".format( - pipeline_name=stack_input["input"]["name"], - index=index + 1, - ), - stack_input["ssm_params"][ADF_DEPLOYMENT_REGION]["modules"], - stack_input["ssm_params"][ADF_DEPLOYMENT_REGION]["kms"], - stack_input["input"], - target, - ).deploy - ] + for target in wave: + target_stage_override = target.get("provider") or top_level_deployment_type + if target.get("name") == "approval" or target.get("provider", "") == "approval": + _actions.extend( + [ + adf_codepipeline.Action( + name=f"wave-{wave_index}-{target.get('name')}".format(target["name"]), + provider="Manual", + category="Approval", + target=target, + run_order=1, + map_params=stack_input["input"], + action_name=f"{target.get('name')}", + ).config + ] + ) + continue + + if "codebuild" in target_stage_override: + _actions.extend( + [ + adf_codebuild.CodeBuild( + scope, + # Use the name of the pipeline for CodeBuild + # instead of the target name as it will always + # operate from the deployment account. + f"{stack_input['input']['name']}-target-{index + 1}-wave-{wave_index}", + stack_input["ssm_params"][ADF_DEPLOYMENT_REGION]["modules"], + stack_input["ssm_params"][ADF_DEPLOYMENT_REGION]["kms"], + stack_input["input"], + target, + ).deploy + ] + ) + continue + + regions = target.get("regions", []) + generate_deployment_action_per_region( + _actions, + regions, + stack_input, + target, + target_stage_override, + top_level_action, ) - continue - regions = target.get("regions", []) - generate_deployment_action_per_region( - _actions, - regions, - stack_input, - target, - target_stage_override, - top_level_action, - ) - _is_approval = ( - targets[0].get("name", "").startswith("approval") - or targets[0].get("provider", "") == "approval" - ) - _action_type_name = "approval" if _is_approval else "deployment" - _stage_name = ( - # 0th Index since step names are for entire stages not - # per target. - targets[0].get("step_name") - or "{action_type_name}-stage-{index}".format( - action_type_name=_action_type_name, - index=index + 1, - ) - ) - _stages.append( - _codepipeline.CfnPipeline.StageDeclarationProperty( - name=_stage_name, - actions=_actions, + _stages.append( + _codepipeline.CfnPipeline.StageDeclarationProperty( + name=_stage_name, + actions=_actions, + ) ) - ) def generate_deployment_action_per_region(_actions, diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_stacks/tests/test_default_pipeline_type.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_stacks/tests/test_default_pipeline_type.py new file mode 100644 index 000000000..7af33fb26 --- /dev/null +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_stacks/tests/test_default_pipeline_type.py @@ -0,0 +1,244 @@ +# Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: MIT-0 + +# pylint: skip-file + +import pytest +from pprint import pprint + +from aws_cdk import core +from cdk_stacks.main import PipelineStack + + +def test_pipeline_creation_outputs_as_expected_when_input_has_1_target_with_2_waves(): + region_name = "eu-central-1" + acount_id = "123456789012" + + stack_input = { + "input": {"params": {}, "default_providers": {}, "regions": {}}, + "ssm_params": {"fake-region": {}}, + } + + stack_input["input"]["name"] = "test-stack" + + stack_input["input"]["default_providers"]["source"] = { + "provider": "s3", + "properties": {"account_id": "123456789012"}, + } + stack_input["input"]["default_providers"]["build"] = { + "provider": "codebuild", + "properties": {"account_id": "123456789012"}, + } + + stack_input["ssm_params"][region_name] = { + "modules": "fake-bucket-name", + "kms": f"arn:aws:kms:{region_name}:{acount_id}:key/my-unique-kms-key-id", + } + app = core.App() + PipelineStack(app, stack_input) + + cloud_assembly = app.synth() + resources = { + k[0:-8]: v for k, v in cloud_assembly.stacks[0].template["Resources"].items() + } + code_pipeline = resources["codepipeline"] + assert code_pipeline["Type"] == "AWS::CodePipeline::Pipeline" + assert len(code_pipeline["Properties"]["Stages"]) == 2 + + source_stage = code_pipeline["Properties"]["Stages"][0] + assert len(source_stage["Actions"]) == 1 + + source_stage_action = source_stage["Actions"][0] + assert source_stage_action["ActionTypeId"]["Category"] == "Source" + assert source_stage_action["ActionTypeId"]["Owner"] == "AWS" + assert source_stage_action["ActionTypeId"]["Provider"] == "S3" + + build_stage = code_pipeline["Properties"]["Stages"][1] + build_stage_action = build_stage["Actions"][0] + assert build_stage_action["ActionTypeId"]["Category"] == "Build" + assert build_stage_action["ActionTypeId"]["Owner"] == "AWS" + assert build_stage_action["ActionTypeId"]["Provider"] == "CodeBuild" + + assert len(build_stage["Actions"]) == 1 + + region_name = "eu-central-1" + acount_id = "123456789012" + + stack_input = { + "input": { + "params": {}, + "default_providers": {"deploy": {"provider": "codedeploy"}}, + "regions": {}, + }, + "ssm_params": {"fake-region": {}}, + } + + stack_input["input"]["name"] = "test-stack" + stack_input["input"]["environments"] = { + "targets": [ + [ + [ + {"name": "account-1", "id": "001", "regions": ["eu-west-1"]}, + {"name": "account-2", "id": "002", "regions": ["eu-west-1"]}, + {"name": "account-3", "id": "003", "regions": ["eu-west-1"]}, + ], + [ + {"name": "account-4", "id": "004", "regions": ["eu-west-1"]}, + {"name": "account-5", "id": "005", "regions": ["eu-west-1"]}, + {"name": "account-6", "id": "006", "regions": ["eu-west-1"]}, + ], + ], + ] + } + + stack_input["input"]["default_providers"]["source"] = { + "provider": "codecommit", + "properties": {"account_id": "123456789012"}, + } + stack_input["input"]["default_providers"]["build"] = { + "provider": "codebuild", + "properties": {"account_id": "123456789012"}, + } + + stack_input["ssm_params"][region_name] = { + "modules": "fake-bucket-name", + "kms": f"arn:aws:kms:{region_name}:{acount_id}:key/my-unique-kms-key-id", + } + app = core.App() + PipelineStack(app, stack_input) + + cloud_assembly = app.synth() + resources = { + k[0:-8]: v for k, v in cloud_assembly.stacks[0].template["Resources"].items() + } + code_pipeline = resources["codepipeline"] + assert code_pipeline["Type"] == "AWS::CodePipeline::Pipeline" + assert len(code_pipeline["Properties"]["Stages"]) == 4 + + target_1_wave_1 = 
code_pipeline["Properties"]["Stages"][2] + assert target_1_wave_1["Name"] == "deployment-stage-1-wave-0" + assert len(target_1_wave_1["Actions"]) == 3 + + target_1_wave_2 = code_pipeline["Properties"]["Stages"][3] + assert target_1_wave_2["Name"] == "deployment-stage-1-wave-1" + assert len(target_1_wave_2["Actions"]) == 3 + + + +def test_pipeline_creation_outputs_as_expected_when_input_has_2_targets_with_2_waves_and_1_wave(): + region_name = "eu-central-1" + acount_id = "123456789012" + + stack_input = { + "input": {"params": {}, "default_providers": {}, "regions": {}}, + "ssm_params": {"fake-region": {}}, + } + + stack_input["input"]["name"] = "test-stack" + + stack_input["input"]["default_providers"]["source"] = { + "provider": "s3", + "properties": {"account_id": "123456789012"}, + } + stack_input["input"]["default_providers"]["build"] = { + "provider": "codebuild", + "properties": {"account_id": "123456789012"}, + } + + stack_input["ssm_params"][region_name] = { + "modules": "fake-bucket-name", + "kms": f"arn:aws:kms:{region_name}:{acount_id}:key/my-unique-kms-key-id", + } + app = core.App() + PipelineStack(app, stack_input) + + cloud_assembly = app.synth() + resources = { + k[0:-8]: v for k, v in cloud_assembly.stacks[0].template["Resources"].items() + } + code_pipeline = resources["codepipeline"] + assert code_pipeline["Type"] == "AWS::CodePipeline::Pipeline" + assert len(code_pipeline["Properties"]["Stages"]) == 2 + + source_stage = code_pipeline["Properties"]["Stages"][0] + assert len(source_stage["Actions"]) == 1 + + source_stage_action = source_stage["Actions"][0] + assert source_stage_action["ActionTypeId"]["Category"] == "Source" + assert source_stage_action["ActionTypeId"]["Owner"] == "AWS" + assert source_stage_action["ActionTypeId"]["Provider"] == "S3" + + build_stage = code_pipeline["Properties"]["Stages"][1] + build_stage_action = build_stage["Actions"][0] + assert build_stage_action["ActionTypeId"]["Category"] == "Build" + assert build_stage_action["ActionTypeId"]["Owner"] == "AWS" + assert build_stage_action["ActionTypeId"]["Provider"] == "CodeBuild" + + assert len(build_stage["Actions"]) == 1 + + region_name = "eu-central-1" + acount_id = "123456789012" + + stack_input = { + "input": { + "params": {}, + "default_providers": {"deploy": {"provider": "codedeploy"}}, + "regions": {}, + }, + "ssm_params": {"fake-region": {}}, + } + + stack_input["input"]["name"] = "test-stack" + stack_input["input"]["environments"] = { + "targets": [ + [ + [ + {"name": "account-1", "id": "001", "regions": ["eu-west-1"]}, + {"name": "account-2", "id": "002", "regions": ["eu-west-1"]}, + {"name": "account-3", "id": "003", "regions": ["eu-west-1"]}, + ], + [ + {"name": "account-4", "id": "004", "regions": ["eu-west-1"]}, + {"name": "account-5", "id": "005", "regions": ["eu-west-1"]}, + {"name": "account-6", "id": "006", "regions": ["eu-west-1"]}, + ], + ], + [[{"name": "account-7", "id": "007", "regions": ["eu-west-2"]}]], + ] + } + + stack_input["input"]["default_providers"]["source"] = { + "provider": "codecommit", + "properties": {"account_id": "123456789012"}, + } + stack_input["input"]["default_providers"]["build"] = { + "provider": "codebuild", + "properties": {"account_id": "123456789012"}, + } + + stack_input["ssm_params"][region_name] = { + "modules": "fake-bucket-name", + "kms": f"arn:aws:kms:{region_name}:{acount_id}:key/my-unique-kms-key-id", + } + app = core.App() + PipelineStack(app, stack_input) + + cloud_assembly = app.synth() + resources = { + k[0:-8]: v for k, v in 
cloud_assembly.stacks[0].template["Resources"].items() + } + code_pipeline = resources["codepipeline"] + assert code_pipeline["Type"] == "AWS::CodePipeline::Pipeline" + assert len(code_pipeline["Properties"]["Stages"]) == 5 + + target_1_wave_1 = code_pipeline["Properties"]["Stages"][2] + assert target_1_wave_1["Name"] == "deployment-stage-1-wave-0" + assert len(target_1_wave_1["Actions"]) == 3 + + target_1_wave_2 = code_pipeline["Properties"]["Stages"][3] + assert target_1_wave_2["Name"] == "deployment-stage-1-wave-1" + assert len(target_1_wave_2["Actions"]) == 3 + + target_2_wave_1 = code_pipeline["Properties"]["Stages"][4] + assert target_2_wave_1["Name"] == "deployment-stage-2-wave-0" + assert len(target_2_wave_1["Actions"]) == 1 \ No newline at end of file diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/generate_pipeline_inputs.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/generate_pipeline_inputs.py index 18acbaa25..0b9761b75 100755 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/generate_pipeline_inputs.py +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/generate_pipeline_inputs.py @@ -111,8 +111,8 @@ def worker_thread(p, organizations, auto_create_repositories, deployment_map, pa pipeline.stage_regions.append(regions) pipeline_target = Target(path_or_tag, target_structure, organizations, step, regions) pipeline_target.fetch_accounts_for_target() - pipeline.template_dictionary["targets"].append( - target_structure.account_list) + + pipeline.template_dictionary["targets"].append(target.target_structure.generate_waves()) if DEPLOYMENT_ACCOUNT_REGION not in regions: pipeline.stage_regions.append(DEPLOYMENT_ACCOUNT_REGION) diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/schema_validation.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/schema_validation.py index d4ab0306f..612931012 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/schema_validation.py +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/schema_validation.py @@ -296,6 +296,10 @@ int )] +TARGET_WAVE_CONFIG_SCHEME = { + Optional("size", default=50): int, +} + # Pipeline Params TARGET_SCHEMA = { @@ -305,7 +309,9 @@ Optional("name"): str, Optional("provider"): Or('lambda', 's3', 'codedeploy', 'cloudformation', 'service_catalog', 'approval', 'codebuild', 'jenkins'), Optional("properties"): Or(CODEBUILD_PROPS, JENKINS_PROPS, CLOUDFORMATION_PROPS, CODEDEPLOY_PROPS, S3_DEPLOY_PROPS, SERVICECATALOG_PROPS, LAMBDA_PROPS, APPROVAL_PROPS), - Optional("regions"): REGION_SCHEMA + Optional("regions"): REGION_SCHEMA, + Optional("exclude", default=[]): [str], + Optional("wave_config", default={"size": 50}): TARGET_WAVE_CONFIG_SCHEME } COMPLETION_TRIGGERS_SCHEMA = { "pipelines": [str] diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/target.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/target.py index 41ad1f37c..c6b90e084 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/target.py +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/target.py @@ -23,6 +23,8 @@ class TargetStructure: def __init__(self, target): self.target = TargetStructure._define_target_type(target) self.account_list = [] + self.wave_config = target.get('wave_config', {}) 
if isinstance(target, dict) else {} + self.exclude = target.get('exclude', []) if isinstance(target, dict) else [] @staticmethod def _define_target_type(target): @@ -45,6 +47,14 @@ def _define_target_type(target): target = [target] return target + def generate_waves(self): + waves = [] + wave_size = self.wave_config.get('size', 50) + length = len(self.account_list) + for index in range(0, length, wave_size): + yield self.account_list[index:min(index + wave_size, length)] + waves.append(self.account_list[index:min(index + wave_size, length)]) + return waves class Target: def __init__(self, path, target_structure, organizations, step, regions): @@ -83,7 +93,7 @@ def _create_response_object(self, responses): _entities = 0 for response in responses: _entities += 1 - if Target._account_is_active(response): + if Target._account_is_active(response) and not response.get('Id') in self.target_structure.exclude: self.target_structure.account_list.append( self._create_target_info( response.get('Name'), @@ -103,8 +113,11 @@ def _target_is_tags(self): responses = self.organizations.get_account_ids_for_tags(self.path) accounts = [] for response in responses: - account = self.organizations.client.describe_account(AccountId=response).get('Account') - accounts.append(account) + if response.startswith('ou-'): + accounts.extend(self.organizations.get_accounts_for_parent(response)) + else: + account = self.organizations.client.describe_account(AccountId=response).get('Account') + accounts.append(account) self._create_response_object(accounts) def _target_is_ou_id(self): diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/tests/test_target.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/tests/test_target.py index fe0743a69..b1391b047 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/tests/test_target.py +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/tests/test_target.py @@ -9,7 +9,7 @@ from pytest import fixture, raises from mock import Mock, patch from .stubs import stub_target -from ..target import Target +from ..target import Target, TargetStructure class MockTargetStructure: @@ -17,91 +17,274 @@ def __init__(self): self.account_list = [] +class MockOrgClient: + def __init__(self, return_value) -> None: + self.values = return_value + + def dir_to_ou(self, path): + return self.values + + @fixture def cls(): cls = Target( - path='/thing/path', - regions=['region1', 'region2'], + path="/thing/path", + regions=["region1", "region2"], target_structure=MockTargetStructure(), organizations=None, - step={} + step={}, ) return cls + def test_account_is_active(): - assert Target._account_is_active({'Status': 'ACTIVE'}) is True - assert Target._account_is_active({'Status': 'FAKE'}) is False + assert Target._account_is_active({"Status": "ACTIVE"}) is True + assert Target._account_is_active({"Status": "FAKE"}) is False + def test_fetch_accounts_for_target_ou_path(): cls = Target( - path='/thing/path', - regions=['region1', 'region2'], + path="/thing/path", + regions=["region1", "region2"], target_structure=MockTargetStructure(), organizations=None, - step={} + step={}, ) - with patch.object(cls, '_target_is_ou_path') as mock: + with patch.object(cls, "_target_is_ou_path") as mock: cls.fetch_accounts_for_target() mock.assert_called_once_with() def test_fetch_accounts_for_target_account_id(): cls = Target( - path='123456789102', - regions=['region1', 'region2'], + path="123456789102", + 
regions=["region1", "region2"], target_structure=MockTargetStructure(), organizations=None, - step={} + step={}, ) - with patch.object(cls, '_target_is_account_id') as mock: + with patch.object(cls, "_target_is_account_id") as mock: cls.fetch_accounts_for_target() mock.assert_called_once_with() def test_fetch_accounts_for_target_ou_id(): cls = Target( - path='ou-123fake', - regions=['region1', 'region2'], + path="ou-123fake", + regions=["region1", "region2"], target_structure=MockTargetStructure(), organizations=None, - step={} + step={}, ) - with patch.object(cls, '_target_is_ou_id') as mock: + with patch.object(cls, "_target_is_ou_id") as mock: cls.fetch_accounts_for_target() mock.assert_called_once_with() def test_fetch_accounts_for_approval(): cls = Target( - path='approval', - regions=['region1', 'region2'], + path="approval", + regions=["region1", "region2"], target_structure=MockTargetStructure(), organizations=None, - step={} + step={}, ) - with patch.object(cls, '_target_is_approval') as mock: + with patch.object(cls, "_target_is_approval") as mock: cls.fetch_accounts_for_target() mock.assert_called_once_with() + def test_fetch_account_error(): cls = Target( - path='some_string', - regions=['region1', 'region2'], + path="some_string", + regions=["region1", "region2"], target_structure=MockTargetStructure(), organizations=Mock(), - step={} + step={}, ) with raises(InvalidDeploymentMapError): cls.fetch_accounts_for_target() + def test_fetch_account_error_invalid_account_id(): cls = Target( - path='12345678910', #11 digits rather than 12 (invalid account id) - regions=['region1', 'region2'], + path="12345678910", # 11 digits rather than 12 (invalid account id) + regions=["region1", "region2"], target_structure=MockTargetStructure(), organizations=Mock(), - step={} + step={}, ) with raises(InvalidDeploymentMapError): - cls.fetch_accounts_for_target() \ No newline at end of file + cls.fetch_accounts_for_target() + + +def test_target_scructure_respects_wave_config(): + test_target_config = {"path": "/some/random/ou", "wave_config": {"size": 2}} + target_structure = TargetStructure( + target=test_target_config, + ) + for step in target_structure.target: + target = Target( + path=test_target_config.get("path")[0], + target_structure=target_structure, + organizations=MockOrgClient( + [ + {"Name": "test-account-1", "Id": "1", "Status": "ACTIVE"}, + {"Name": "test-account-2", "Id": "2", "Status": "ACTIVE"}, + {"Name": "test-account-3", "Id": "3", "Status": "ACTIVE"}, + {"Name": "test-account-4", "Id": "4", "Status": "ACTIVE"}, + {"Name": "test-account-5", "Id": "5", "Status": "ACTIVE"}, + ] + ), + step=step, + regions=["region1"], + ) + target.fetch_accounts_for_target() + waves = list(target.target_structure.generate_waves()) + assert len(waves) == 3 + + assert len(waves[0]) == 2 + assert waves[0] == [ + { + "id": "1", + "name": "test-account-1", + "path": "/some/random/ou", + "properties": {}, + "provider": {}, + "regions": ["region1"], + "step_name": "", + }, + { + "id": "2", + "name": "test-account-2", + "path": "/some/random/ou", + "properties": {}, + "provider": {}, + "regions": ["region1"], + "step_name": "", + }, + ] + + assert len(waves[1]) == 2 + assert waves[1] == [ + { + "id": "3", + "name": "test-account-3", + "path": "/some/random/ou", + "properties": {}, + "provider": {}, + "regions": ["region1"], + "step_name": "", + }, + { + "id": "4", + "name": "test-account-4", + "path": "/some/random/ou", + "properties": {}, + "provider": {}, + "regions": ["region1"], + "step_name": "", + 
}, + ] + + assert len(waves[2]) == 1 + assert waves[2] == [ + { + "id": "5", + "name": "test-account-5", + "path": "/some/random/ou", + "properties": {}, + "provider": {}, + "regions": ["region1"], + "step_name": "", + }, + ] + + +def test_target_wave_scructure_respects_exclude_config(): + test_target_config = { + "path": "/some/random/ou", + "wave_config": {"size": 2}, + "exclude": ["5"], + } + target_structure = TargetStructure( + target=test_target_config, + ) + for step in target_structure.target: + target = Target( + path=test_target_config.get("path")[0], + target_structure=target_structure, + organizations=MockOrgClient( + [ + {"Name": "test-account-1", "Id": "1", "Status": "ACTIVE"}, + {"Name": "test-account-2", "Id": "2", "Status": "ACTIVE"}, + {"Name": "test-account-3", "Id": "3", "Status": "ACTIVE"}, + {"Name": "test-account-4", "Id": "4", "Status": "ACTIVE"}, + {"Name": "test-account-5", "Id": "5", "Status": "ACTIVE"}, + {"Name": "test-account-6", "Id": "6", "Status": "ACTIVE"}, + ] + ), + step=step, + regions=["region1"], + ) + target.fetch_accounts_for_target() + waves = list(target.target_structure.generate_waves()) + assert len(waves) == 3 + + assert len(waves[0]) == 2 + assert waves[0] == [ + { + "id": "1", + "name": "test-account-1", + "path": "/some/random/ou", + "properties": {}, + "provider": {}, + "regions": ["region1"], + "step_name": "", + }, + { + "id": "2", + "name": "test-account-2", + "path": "/some/random/ou", + "properties": {}, + "provider": {}, + "regions": ["region1"], + "step_name": "", + }, + ] + + assert len(waves[1]) == 2 + assert waves[1] == [ + { + "id": "3", + "name": "test-account-3", + "path": "/some/random/ou", + "properties": {}, + "provider": {}, + "regions": ["region1"], + "step_name": "", + }, + { + "id": "4", + "name": "test-account-4", + "path": "/some/random/ou", + "properties": {}, + "provider": {}, + "regions": ["region1"], + "step_name": "", + }, + ] + + assert len(waves[2]) == 1 + assert waves[2] == [ + { + "id": "6", + "name": "test-account-6", + "path": "/some/random/ou", + "properties": {}, + "provider": {}, + "regions": ["region1"], + "step_name": "", + }, + ] From a0dd5cb03a4b17a085ea3b6530749fec17d54a38 Mon Sep 17 00:00:00 2001 From: Stewart Wallace Date: Tue, 23 Nov 2021 14:55:53 +0000 Subject: [PATCH 2/7] Removing duplicated code. 
Removing unused imports --- .../tests/test_default_pipeline_type.py | 104 +----------------- 1 file changed, 1 insertion(+), 103 deletions(-) diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_stacks/tests/test_default_pipeline_type.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_stacks/tests/test_default_pipeline_type.py index 7af33fb26..9b2f1a88a 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_stacks/tests/test_default_pipeline_type.py +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_stacks/tests/test_default_pipeline_type.py @@ -3,9 +3,6 @@ # pylint: skip-file -import pytest -from pprint import pprint - from aws_cdk import core from cdk_stacks.main import PipelineStack @@ -14,56 +11,6 @@ def test_pipeline_creation_outputs_as_expected_when_input_has_1_target_with_2_wa region_name = "eu-central-1" acount_id = "123456789012" - stack_input = { - "input": {"params": {}, "default_providers": {}, "regions": {}}, - "ssm_params": {"fake-region": {}}, - } - - stack_input["input"]["name"] = "test-stack" - - stack_input["input"]["default_providers"]["source"] = { - "provider": "s3", - "properties": {"account_id": "123456789012"}, - } - stack_input["input"]["default_providers"]["build"] = { - "provider": "codebuild", - "properties": {"account_id": "123456789012"}, - } - - stack_input["ssm_params"][region_name] = { - "modules": "fake-bucket-name", - "kms": f"arn:aws:kms:{region_name}:{acount_id}:key/my-unique-kms-key-id", - } - app = core.App() - PipelineStack(app, stack_input) - - cloud_assembly = app.synth() - resources = { - k[0:-8]: v for k, v in cloud_assembly.stacks[0].template["Resources"].items() - } - code_pipeline = resources["codepipeline"] - assert code_pipeline["Type"] == "AWS::CodePipeline::Pipeline" - assert len(code_pipeline["Properties"]["Stages"]) == 2 - - source_stage = code_pipeline["Properties"]["Stages"][0] - assert len(source_stage["Actions"]) == 1 - - source_stage_action = source_stage["Actions"][0] - assert source_stage_action["ActionTypeId"]["Category"] == "Source" - assert source_stage_action["ActionTypeId"]["Owner"] == "AWS" - assert source_stage_action["ActionTypeId"]["Provider"] == "S3" - - build_stage = code_pipeline["Properties"]["Stages"][1] - build_stage_action = build_stage["Actions"][0] - assert build_stage_action["ActionTypeId"]["Category"] == "Build" - assert build_stage_action["ActionTypeId"]["Owner"] == "AWS" - assert build_stage_action["ActionTypeId"]["Provider"] == "CodeBuild" - - assert len(build_stage["Actions"]) == 1 - - region_name = "eu-central-1" - acount_id = "123456789012" - stack_input = { "input": { "params": {}, @@ -124,61 +71,12 @@ def test_pipeline_creation_outputs_as_expected_when_input_has_1_target_with_2_wa assert len(target_1_wave_2["Actions"]) == 3 - def test_pipeline_creation_outputs_as_expected_when_input_has_2_targets_with_2_waves_and_1_wave(): - region_name = "eu-central-1" - acount_id = "123456789012" - - stack_input = { - "input": {"params": {}, "default_providers": {}, "regions": {}}, - "ssm_params": {"fake-region": {}}, - } - - stack_input["input"]["name"] = "test-stack" - - stack_input["input"]["default_providers"]["source"] = { - "provider": "s3", - "properties": {"account_id": "123456789012"}, - } - stack_input["input"]["default_providers"]["build"] = { - "provider": "codebuild", - "properties": {"account_id": "123456789012"}, - } - - 
stack_input["ssm_params"][region_name] = { - "modules": "fake-bucket-name", - "kms": f"arn:aws:kms:{region_name}:{acount_id}:key/my-unique-kms-key-id", - } - app = core.App() - PipelineStack(app, stack_input) - - cloud_assembly = app.synth() - resources = { - k[0:-8]: v for k, v in cloud_assembly.stacks[0].template["Resources"].items() - } - code_pipeline = resources["codepipeline"] - assert code_pipeline["Type"] == "AWS::CodePipeline::Pipeline" - assert len(code_pipeline["Properties"]["Stages"]) == 2 - - source_stage = code_pipeline["Properties"]["Stages"][0] - assert len(source_stage["Actions"]) == 1 - - source_stage_action = source_stage["Actions"][0] - assert source_stage_action["ActionTypeId"]["Category"] == "Source" - assert source_stage_action["ActionTypeId"]["Owner"] == "AWS" - assert source_stage_action["ActionTypeId"]["Provider"] == "S3" - - build_stage = code_pipeline["Properties"]["Stages"][1] - build_stage_action = build_stage["Actions"][0] - assert build_stage_action["ActionTypeId"]["Category"] == "Build" - assert build_stage_action["ActionTypeId"]["Owner"] == "AWS" - assert build_stage_action["ActionTypeId"]["Provider"] == "CodeBuild" - - assert len(build_stage["Actions"]) == 1 region_name = "eu-central-1" acount_id = "123456789012" + stack_input = { "input": { "params": {}, From 5f5c7ba592c7c07d656daa43194e0d0fd6dd638d Mon Sep 17 00:00:00 2001 From: Simon Kok Date: Tue, 23 Nov 2021 18:05:56 +0100 Subject: [PATCH 3/7] Removed redundant empty lines from tests --- .../shared/cdk/cdk_stacks/tests/test_default_pipeline_type.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_stacks/tests/test_default_pipeline_type.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_stacks/tests/test_default_pipeline_type.py index 9b2f1a88a..beb76411d 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_stacks/tests/test_default_pipeline_type.py +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_stacks/tests/test_default_pipeline_type.py @@ -72,11 +72,9 @@ def test_pipeline_creation_outputs_as_expected_when_input_has_1_target_with_2_wa def test_pipeline_creation_outputs_as_expected_when_input_has_2_targets_with_2_waves_and_1_wave(): - region_name = "eu-central-1" acount_id = "123456789012" - stack_input = { "input": { "params": {}, @@ -139,4 +137,4 @@ def test_pipeline_creation_outputs_as_expected_when_input_has_2_targets_with_2_w target_2_wave_1 = code_pipeline["Properties"]["Stages"][4] assert target_2_wave_1["Name"] == "deployment-stage-2-wave-0" - assert len(target_2_wave_1["Actions"]) == 1 \ No newline at end of file + assert len(target_2_wave_1["Actions"]) == 1 From 2f6ecf34b97e031061372808b1b2d8d430d8bb3e Mon Sep 17 00:00:00 2001 From: Simon Kok Date: Tue, 23 Nov 2021 18:25:58 +0100 Subject: [PATCH 4/7] Add user documentation on the wave configuration --- docs/user-guide.md | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/docs/user-guide.md b/docs/user-guide.md index cd921f94b..eaf6fa83a 100644 --- a/docs/user-guide.md +++ b/docs/user-guide.md @@ -198,6 +198,16 @@ targets: properties: ... ``` +CodePipeline has a limit of 50 actions per stage. +A stage is identified in the above list of targets with a new entry in the array, using `-`. 
+ +To workaround this limit, ADF will split the accounts x regions that are selected as part of one stage over multiple stages when required. +A new stage is introduced for every 50 accounts/region deployments by default. The default of 50 will make sense for most pipelines. +However, in some situations, you would like to limit the rate at which an update is rolled out to the list of accounts/regions. +This can be configured using the `wave_config/size` target property. Setting these to `30` as shown above, will introduce a new stage for every 30 accounts/regions. +If the `/my_ou/production/some_path` OU would contain 25 accounts (actually 26, but account `9999999999` is excluded by the setup above), multiplied by the two regions it targets in the last step, the total of account/region deployment actions required would be 50. +Since the configuration is set to 30, the first 30 accounts will be deployed to in the first stage. If all of these successfully deploy, the pipeline will continue to the next stage, deploying to the remaining 20 account/regions. + ### Params Pipelines also have parameters that don't relate to a specific stage but rather the pipeline as a whole. For example, a pipeline might have an single notification endpoint in which it would send a notification when it completes or fails. It also might have things such as a schedule for how often it runs. From e6d51cf246499fc053224ec198666ca33b08ee0b Mon Sep 17 00:00:00 2001 From: Stewart Wallace Date: Wed, 24 Nov 2021 09:19:47 +0000 Subject: [PATCH 5/7] rename wave_config to just wave --- docs/user-guide.md | 4 ++-- .../adf-build/shared/schema_validation.py | 4 ++-- .../bootstrap_repository/adf-build/shared/target.py | 4 ++-- .../adf-build/shared/tests/test_target.py | 6 +++--- 4 files changed, 9 insertions(+), 9 deletions(-) diff --git a/docs/user-guide.md b/docs/user-guide.md index eaf6fa83a..9d55050b7 100644 --- a/docs/user-guide.md +++ b/docs/user-guide.md @@ -191,7 +191,7 @@ targets: - path: /my_ou/production/some_path regions: [eu-central-1, us-west-1] name: another_step - wave_config: + wave: size: 30 # (Optional) This forces the pipeline to split this OU into seperate stages, each stage containing up to X accounts exclude: - 9999999999 # (Optional) List of accounts to exclude from this target. Currently only supports account Ids @@ -204,7 +204,7 @@ A stage is identified in the above list of targets with a new entry in the array To workaround this limit, ADF will split the accounts x regions that are selected as part of one stage over multiple stages when required. A new stage is introduced for every 50 accounts/region deployments by default. The default of 50 will make sense for most pipelines. However, in some situations, you would like to limit the rate at which an update is rolled out to the list of accounts/regions. -This can be configured using the `wave_config/size` target property. Setting these to `30` as shown above, will introduce a new stage for every 30 accounts/regions. +This can be configured using the `wave/size` target property. Setting these to `30` as shown above, will introduce a new stage for every 30 accounts/regions. If the `/my_ou/production/some_path` OU would contain 25 accounts (actually 26, but account `9999999999` is excluded by the setup above), multiplied by the two regions it targets in the last step, the total of account/region deployment actions required would be 50. Since the configuration is set to 30, the first 30 accounts will be deployed to in the first stage. 
If all of these successfully deploy, the pipeline will continue to the next stage, deploying to the remaining 20 account/regions. diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/schema_validation.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/schema_validation.py index 933f9300f..9f7b516ce 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/schema_validation.py +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/schema_validation.py @@ -302,7 +302,7 @@ int )] -TARGET_WAVE_CONFIG_SCHEME = { +TARGET_wave_SCHEME = { Optional("size", default=50): int, } @@ -317,7 +317,7 @@ Optional("properties"): Or(CODEBUILD_PROPS, JENKINS_PROPS, CLOUDFORMATION_PROPS, CODEDEPLOY_PROPS, S3_DEPLOY_PROPS, SERVICECATALOG_PROPS, LAMBDA_PROPS, APPROVAL_PROPS), Optional("regions"): REGION_SCHEMA, Optional("exclude", default=[]): [str], - Optional("wave_config", default={"size": 50}): TARGET_WAVE_CONFIG_SCHEME + Optional("wave", default={"size": 50}): TARGET_wave_SCHEME } COMPLETION_TRIGGERS_SCHEMA = { "pipelines": [str] diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/target.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/target.py index c6b90e084..4af527752 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/target.py +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/target.py @@ -23,7 +23,7 @@ class TargetStructure: def __init__(self, target): self.target = TargetStructure._define_target_type(target) self.account_list = [] - self.wave_config = target.get('wave_config', {}) if isinstance(target, dict) else {} + self.wave = target.get('wave', {}) if isinstance(target, dict) else {} self.exclude = target.get('exclude', []) if isinstance(target, dict) else [] @staticmethod @@ -49,7 +49,7 @@ def _define_target_type(target): def generate_waves(self): waves = [] - wave_size = self.wave_config.get('size', 50) + wave_size = self.wave.get('size', 50) length = len(self.account_list) for index in range(0, length, wave_size): yield self.account_list[index:min(index + wave_size, length)] diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/tests/test_target.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/tests/test_target.py index b1391b047..04de79739 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/tests/test_target.py +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/tests/test_target.py @@ -119,8 +119,8 @@ def test_fetch_account_error_invalid_account_id(): cls.fetch_accounts_for_target() -def test_target_scructure_respects_wave_config(): - test_target_config = {"path": "/some/random/ou", "wave_config": {"size": 2}} +def test_target_scructure_respects_wave(): + test_target_config = {"path": "/some/random/ou", "wave": {"size": 2}} target_structure = TargetStructure( target=test_target_config, ) @@ -205,7 +205,7 @@ def test_target_scructure_respects_wave_config(): def test_target_wave_scructure_respects_exclude_config(): test_target_config = { "path": "/some/random/ou", - "wave_config": {"size": 2}, + "wave": {"size": 2}, "exclude": ["5"], } target_structure = TargetStructure( From 1d7308388b2378e0c14ebd5886bc0ab8ad5bdad3 Mon Sep 17 00:00:00 2001 From: Simon Kok Date: Wed, 24 Nov 2021 11:33:29 +0100 Subject: [PATCH 6/7] Fix schema upper case 
issue of TARGET_WAVE_SCHEME --- .../adf-build/shared/schema_validation.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/schema_validation.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/schema_validation.py index 9f7b516ce..f71475864 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/schema_validation.py +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/schema_validation.py @@ -302,7 +302,7 @@ int )] -TARGET_wave_SCHEME = { +TARGET_WAVE_SCHEME = { Optional("size", default=50): int, } @@ -317,7 +317,7 @@ Optional("properties"): Or(CODEBUILD_PROPS, JENKINS_PROPS, CLOUDFORMATION_PROPS, CODEDEPLOY_PROPS, S3_DEPLOY_PROPS, SERVICECATALOG_PROPS, LAMBDA_PROPS, APPROVAL_PROPS), Optional("regions"): REGION_SCHEMA, Optional("exclude", default=[]): [str], - Optional("wave", default={"size": 50}): TARGET_wave_SCHEME + Optional("wave", default={"size": 50}): TARGET_WAVE_SCHEME } COMPLETION_TRIGGERS_SCHEMA = { "pipelines": [str] From d9ba2925387247c2a9f6e9c9e90ffab4d6f20c41 Mon Sep 17 00:00:00 2001 From: Simon Kok Date: Wed, 24 Nov 2021 12:23:38 +0100 Subject: [PATCH 7/7] Fix account_id typo in tests --- .../cdk/cdk_stacks/tests/test_default_pipeline_type.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_stacks/tests/test_default_pipeline_type.py b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_stacks/tests/test_default_pipeline_type.py index beb76411d..255a8f796 100644 --- a/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_stacks/tests/test_default_pipeline_type.py +++ b/src/lambda_codebase/initial_commit/bootstrap_repository/adf-build/shared/cdk/cdk_stacks/tests/test_default_pipeline_type.py @@ -9,7 +9,7 @@ def test_pipeline_creation_outputs_as_expected_when_input_has_1_target_with_2_waves(): region_name = "eu-central-1" - acount_id = "123456789012" + account_id = "123456789012" stack_input = { "input": { @@ -49,7 +49,7 @@ def test_pipeline_creation_outputs_as_expected_when_input_has_1_target_with_2_wa stack_input["ssm_params"][region_name] = { "modules": "fake-bucket-name", - "kms": f"arn:aws:kms:{region_name}:{acount_id}:key/my-unique-kms-key-id", + "kms": f"arn:aws:kms:{region_name}:{account_id}:key/my-unique-kms-key-id", } app = core.App() PipelineStack(app, stack_input) @@ -73,7 +73,7 @@ def test_pipeline_creation_outputs_as_expected_when_input_has_1_target_with_2_wa def test_pipeline_creation_outputs_as_expected_when_input_has_2_targets_with_2_waves_and_1_wave(): region_name = "eu-central-1" - acount_id = "123456789012" + account_id = "123456789012" stack_input = { "input": { @@ -114,7 +114,7 @@ def test_pipeline_creation_outputs_as_expected_when_input_has_2_targets_with_2_w stack_input["ssm_params"][region_name] = { "modules": "fake-bucket-name", - "kms": f"arn:aws:kms:{region_name}:{acount_id}:key/my-unique-kms-key-id", + "kms": f"arn:aws:kms:{region_name}:{account_id}:key/my-unique-kms-key-id", } app = core.App() PipelineStack(app, stack_input)
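
For reviewers who want to see the wave behaviour in isolation: the combination of `TargetStructure.generate_waves` and the `exclude` filter added in this series boils down to chunking the resolved account list into slices of at most `wave.size` targets, after dropping excluded account ids. The sketch below is a minimal, standalone illustration under that reading; `chunk_into_waves` and the sample accounts are hypothetical names for this example only, not part of ADF (in ADF the exclusion is applied earlier, while accounts are fetched in `Target._create_response_object`, but the resulting waves come out the same).

```python
# Standalone sketch of the wave-splitting behaviour introduced by this PR.
# Illustrative only: `chunk_into_waves` and the sample records are hypothetical,
# not ADF code.
from typing import Dict, List, Optional


def chunk_into_waves(
    account_list: List[Dict],
    wave_size: int = 50,
    exclude: Optional[List[str]] = None,
) -> List[List[Dict]]:
    """Drop excluded account ids, then split the remainder into waves of at
    most `wave_size` targets (mirroring TargetStructure.generate_waves)."""
    exclude = exclude or []
    remaining = [
        account for account in account_list
        if account.get("id") not in exclude
    ]
    return [
        remaining[index:index + wave_size]
        for index in range(0, len(remaining), wave_size)
    ]


if __name__ == "__main__":
    accounts = [{"id": str(n), "name": f"account-{n}"} for n in range(1, 7)]
    waves = chunk_into_waves(accounts, wave_size=2, exclude=["5"])
    for number, wave in enumerate(waves):
        print(number, [account["id"] for account in wave])
```

Running the example prints three waves — accounts 1 and 2, then 3 and 4, then 6 — which matches the expectations asserted in `test_target_wave_scructure_respects_exclude_config` above: two full waves of size 2 and a final wave of size 1 once the excluded account is removed.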