diff --git a/.gitallowed b/.gitallowed index c04347c9bf4a6..ed8c612466b3d 100644 --- a/.gitallowed +++ b/.gitallowed @@ -5,6 +5,7 @@ account: '234567890123' # Account patterns used in the README account: '000000000000' account: '111111111111' +account: '222222222222' account: '333333333333' # used in physical names tests in @aws-cdk/core diff --git a/CHANGELOG.md b/CHANGELOG.md index ce0459cfaf7c8..7411a92e30a23 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,23 @@ All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. +## [1.51.0](https://github.com/aws/aws-cdk/compare/v1.50.0...v1.51.0) (2020-07-09) + + +### Features + +* **cloudfront:** Add connectionAttempts, connectionTimeout in origin configuration ([#8573](https://github.com/aws/aws-cdk/issues/8573)) ([84b923f](https://github.com/aws/aws-cdk/commit/84b923fb853d674e0a07f4296f2b23800d139366)), closes [#8572](https://github.com/aws/aws-cdk/issues/8572) +* Developer Preview of CDK Pipelines ([#8868](https://github.com/aws/aws-cdk/issues/8868)) ([d2609bd](https://github.com/aws/aws-cdk/commit/d2609bdbd0ba0347ff617267e928a2b54482e78a)), closes [aws/aws-cdk-rfcs#49](https://github.com/aws/aws-cdk-rfcs/issues/49) + + +### Bug Fixes + +* **appmesh:** Update enums for appmesh ([#8716](https://github.com/aws/aws-cdk/issues/8716)) ([64e3d88](https://github.com/aws/aws-cdk/commit/64e3d888a66da84c066298564ad2875cb93bfd27)) +* **cli:** Python sample app template does not follow PEP8 ([#8936](https://github.com/aws/aws-cdk/issues/8936)) ([0717919](https://github.com/aws/aws-cdk/commit/07179194d8fc4e3beaeafbe6cf04a2f3d1addd2c)) +* **codepipeline:** set correct header assignment in S3 deployment cache control ([#8864](https://github.com/aws/aws-cdk/issues/8864)) ([be1094b](https://github.com/aws/aws-cdk/commit/be1094b4f4ef1eb194333faaf804db610535fea1)), closes 
[#8774](https://github.com/aws/aws-cdk/issues/8774) +* **ec2:** VpcEndpoint AZ lookup fails for AWS services ([#8386](https://github.com/aws/aws-cdk/issues/8386)) ([54e5c36](https://github.com/aws/aws-cdk/commit/54e5c3658241320244ae3055ec3ef7ca18926001)) +* **iam:** cannot import service role with a principal in its path ([#8692](https://github.com/aws/aws-cdk/issues/8692)) ([55eb7d7](https://github.com/aws/aws-cdk/commit/55eb7d794450702e540246819f622a2bba22380e)), closes [#8691](https://github.com/aws/aws-cdk/issues/8691) + ## [1.50.0](https://github.com/aws/aws-cdk/compare/v1.49.1...v1.50.0) (2020-07-07) diff --git a/lerna.json b/lerna.json index 57f402c732b90..0a02acca2ae0c 100644 --- a/lerna.json +++ b/lerna.json @@ -10,5 +10,5 @@ "tools/*" ], "rejectCycles": "true", - "version": "1.50.0" + "version": "1.51.0" } diff --git a/packages/@aws-cdk/app-delivery/README.md b/packages/@aws-cdk/app-delivery/README.md index fee307ffeb6fb..85e378002e5d1 100644 --- a/packages/@aws-cdk/app-delivery/README.md +++ b/packages/@aws-cdk/app-delivery/README.md @@ -2,9 +2,9 @@ --- -![cdk-constructs: Experimental](https://img.shields.io/badge/cdk--constructs-experimental-important.svg?style=for-the-badge) +![Deprecated](https://img.shields.io/badge/deprecated-critical.svg?style=for-the-badge) -> The APIs of higher level constructs in this module are experimental and under active development. They are subject to non-backward compatible changes or removal in any future version. These are not subject to the [Semantic Versioning](https://semver.org/) model and breaking changes will be announced in the release notes. This means that while you may use them, you may need to update your source code when upgrading to a newer version of this package. +> This API may emit warnings. Backward compatibility is not guaranteed. 
--- @@ -13,6 +13,13 @@ This library includes a *CodePipeline* composite Action for deploying AWS CDK Ap This module is part of the [AWS Cloud Development Kit](https://github.com/aws/aws-cdk) project. + +# Replacement recommended + +This library has been deprecated. We recommend you use the +[@aws-cdk/pipelines](https://docs.aws.amazon.com/cdk/api/latest/docs/pipelines.html) module instead. + + ### Limitations The construct library in it's current form has the following limitations: 1. It can only deploy stacks that are hosted in the same AWS account and region as the *CodePipeline* is. diff --git a/packages/@aws-cdk/app-delivery/package.json b/packages/@aws-cdk/app-delivery/package.json index d1f6bedf06839..016b2a151ff1e 100644 --- a/packages/@aws-cdk/app-delivery/package.json +++ b/packages/@aws-cdk/app-delivery/package.json @@ -57,7 +57,7 @@ "@types/nodeunit": "^0.0.31", "cdk-build-tools": "0.0.0", "cdk-integ-tools": "0.0.0", - "fast-check": "^1.25.1", + "fast-check": "^1.26.0", "nodeunit": "^0.11.3", "pkglint": "0.0.0" }, @@ -92,8 +92,8 @@ "engines": { "node": ">= 10.13.0 <13 || >=13.7.0" }, - "stability": "experimental", - "maturity": "experimental", + "stability": "deprecated", + "maturity": "deprecated", "nyc": { "statements": 75 }, diff --git a/packages/@aws-cdk/aws-applicationautoscaling/package.json b/packages/@aws-cdk/aws-applicationautoscaling/package.json index 64ede897c926b..51bc9b8813d45 100644 --- a/packages/@aws-cdk/aws-applicationautoscaling/package.json +++ b/packages/@aws-cdk/aws-applicationautoscaling/package.json @@ -66,7 +66,7 @@ "@types/nodeunit": "^0.0.31", "cdk-build-tools": "0.0.0", "cfn2ts": "0.0.0", - "fast-check": "^1.25.1", + "fast-check": "^1.26.0", "nodeunit": "^0.11.3", "pkglint": "0.0.0" }, diff --git a/packages/@aws-cdk/aws-appmesh/lib/shared-interfaces.ts b/packages/@aws-cdk/aws-appmesh/lib/shared-interfaces.ts index 34f2001c62e36..c89d9b9f6635b 100644 --- a/packages/@aws-cdk/aws-appmesh/lib/shared-interfaces.ts +++ 
b/packages/@aws-cdk/aws-appmesh/lib/shared-interfaces.ts @@ -6,6 +6,8 @@ import { Duration } from '@aws-cdk/core'; export enum Protocol { HTTP = 'http', TCP = 'tcp', + HTTP2 = 'http2', + GRPC = 'grpc', } /** diff --git a/packages/@aws-cdk/aws-appmesh/lib/virtual-node.ts b/packages/@aws-cdk/aws-appmesh/lib/virtual-node.ts index 63c97392473c0..f7a2548a48e7b 100644 --- a/packages/@aws-cdk/aws-appmesh/lib/virtual-node.ts +++ b/packages/@aws-cdk/aws-appmesh/lib/virtual-node.ts @@ -167,6 +167,10 @@ function renderHealthCheck(hc: HealthCheck | undefined, pm: PortMapping): CfnVir throw new Error('The path property cannot be set with Protocol.TCP'); } + if (hc.protocol === Protocol.GRPC && hc.path) { + throw new Error('The path property cannot be set with Protocol.GRPC'); + } + const healthCheck: CfnVirtualNode.HealthCheckProperty = { healthyThreshold: hc.healthyThreshold || 2, intervalMillis: (hc.interval || cdk.Duration.seconds(5)).toMilliseconds(), // min diff --git a/packages/@aws-cdk/aws-appmesh/package.json b/packages/@aws-cdk/aws-appmesh/package.json index 3bbc70c2941d8..d917e0ffccb98 100644 --- a/packages/@aws-cdk/aws-appmesh/package.json +++ b/packages/@aws-cdk/aws-appmesh/package.json @@ -134,7 +134,9 @@ "props-default-doc:@aws-cdk/aws-appmesh.VirtualRouterAttributes.meshName", "props-default-doc:@aws-cdk/aws-appmesh.VirtualRouterAttributes.virtualRouterArn", "props-default-doc:@aws-cdk/aws-appmesh.VirtualRouterAttributes.virtualRouterName", - "docs-public-apis:@aws-cdk/aws-appmesh.Protocol.HTTP" + "docs-public-apis:@aws-cdk/aws-appmesh.Protocol.HTTP", + "docs-public-apis:@aws-cdk/aws-appmesh.Protocol.HTTP2", + "docs-public-apis:@aws-cdk/aws-appmesh.Protocol.GRPC" ] }, "stability": "experimental", diff --git a/packages/@aws-cdk/aws-appmesh/test/test.health-check.ts b/packages/@aws-cdk/aws-appmesh/test/test.health-check.ts index 9f877fd26dd6a..a89ba66afb3db 100644 --- a/packages/@aws-cdk/aws-appmesh/test/test.health-check.ts +++ 
b/packages/@aws-cdk/aws-appmesh/test/test.health-check.ts @@ -128,4 +128,24 @@ export = { test.done(); }, + + 'throws if path and Protocol.GRPC'(test: Test) { + // GIVEN + const stack = new cdk.Stack(); + + // WHEN + const toThrow = (protocol: appmesh.Protocol) => getNode(stack).addListeners({ + healthCheck: { + protocol, + path: '/', + }, + }); + + // THEN + test.doesNotThrow(() => toThrow(appmesh.Protocol.HTTP)); + test.throws(() => toThrow(appmesh.Protocol.GRPC), /The path property cannot be set with Protocol.GRPC/); + + test.done(); + }, + }; diff --git a/packages/@aws-cdk/aws-appsync/README.md b/packages/@aws-cdk/aws-appsync/README.md index 378a5a4c8bd7d..e815444352288 100644 --- a/packages/@aws-cdk/aws-appsync/README.md +++ b/packages/@aws-cdk/aws-appsync/README.md @@ -111,6 +111,7 @@ export class ApiStack extends Stack { type: AttributeType.STRING, }, }); + // If your table is already created you can also use use import table and use it as data source. const customerDS = api.addDynamoDbDataSource('Customer', 'The customer data source', customerTable); customerDS.createResolver({ typeName: 'Query', diff --git a/packages/@aws-cdk/aws-appsync/lib/data-source.ts b/packages/@aws-cdk/aws-appsync/lib/data-source.ts new file mode 100644 index 0000000000000..0daf1f996a452 --- /dev/null +++ b/packages/@aws-cdk/aws-appsync/lib/data-source.ts @@ -0,0 +1,251 @@ +import { ITable } from '@aws-cdk/aws-dynamodb'; +import { IGrantable, IPrincipal, IRole, Role, ServicePrincipal } from '@aws-cdk/aws-iam'; +import { IFunction } from '@aws-cdk/aws-lambda'; +import { Construct, IResolvable } from '@aws-cdk/core'; +import { CfnDataSource } from './appsync.generated'; +import { GraphQLApi } from './graphqlapi'; +import { BaseResolverProps, Resolver } from './resolver'; + +/** + * Base properties for an AppSync datasource + */ +export interface BaseDataSourceProps { + /** + * The API to attach this data source to + */ + readonly api: GraphQLApi; + /** + * The name of the data source 
+ */ + readonly name: string; + /** + * the description of the data source + * + * @default - None + */ + readonly description?: string; +} + +/** + * properties for an AppSync datasource backed by a resource + */ +export interface BackedDataSourceProps extends BaseDataSourceProps { + /** + * The IAM service role to be assumed by AppSync to interact with the data source + * + * @default - Create a new role + */ + readonly serviceRole?: IRole; +} + +/** + * props used by implementations of BaseDataSource to provide configuration. Should not be used directly. + */ +export interface ExtendedDataSourceProps { + /** + * the type of the AppSync datasource + */ + readonly type: string; + /** + * configuration for DynamoDB Datasource + * + * @default - No config + */ + readonly dynamoDbConfig?: CfnDataSource.DynamoDBConfigProperty | IResolvable; + /** + * configuration for Elasticsearch Datasource + * + * @default - No config + */ + readonly elasticsearchConfig?: CfnDataSource.ElasticsearchConfigProperty | IResolvable; + /** + * configuration for HTTP Datasource + * + * @default - No config + */ + readonly httpConfig?: CfnDataSource.HttpConfigProperty | IResolvable; + /** + * configuration for Lambda Datasource + * + * @default - No config + */ + readonly lambdaConfig?: CfnDataSource.LambdaConfigProperty | IResolvable; + /** + * configuration for RDS Datasource + * + * @default - No config + */ + readonly relationalDatabaseConfig?: CfnDataSource.RelationalDatabaseConfigProperty | IResolvable; +} + +/** + * Abstract AppSync datasource implementation. 
Do not use directly but use subclasses for concrete datasources + */ +export abstract class BaseDataSource extends Construct { + /** + * the name of the data source + */ + public readonly name: string; + /** + * the underlying CFN data source resource + */ + public readonly ds: CfnDataSource; + + protected api: GraphQLApi; + protected serviceRole?: IRole; + + constructor(scope: Construct, id: string, props: BackedDataSourceProps, extended: ExtendedDataSourceProps) { + super(scope, id); + + if (extended.type !== 'NONE') { + this.serviceRole = props.serviceRole || new Role(this, 'ServiceRole', { assumedBy: new ServicePrincipal('appsync') }); + } + + this.ds = new CfnDataSource(this, 'Resource', { + apiId: props.api.apiId, + name: props.name, + description: props.description, + serviceRoleArn: this.serviceRole?.roleArn, + ...extended, + }); + this.name = props.name; + this.api = props.api; + } + + /** + * creates a new resolver for this datasource and API using the given properties + */ + public createResolver(props: BaseResolverProps): Resolver { + return new Resolver(this, `${props.typeName}${props.fieldName}Resolver`, { + api: this.api, + dataSource: this, + ...props, + }); + } +} + +/** + * Abstract AppSync datasource implementation. 
Do not use directly but use subclasses for resource backed datasources + */ +export abstract class BackedDataSource extends BaseDataSource implements IGrantable { + /** + * the principal of the data source to be IGrantable + */ + public readonly grantPrincipal: IPrincipal; + + constructor(scope: Construct, id: string, props: BackedDataSourceProps, extended: ExtendedDataSourceProps) { + super(scope, id, props, extended); + + this.grantPrincipal = this.serviceRole!; + } +} + +/** + * Properties for an AppSync dummy datasource + */ +export interface NoneDataSourceProps extends BaseDataSourceProps { +} + +/** + * An AppSync dummy datasource + */ +export class NoneDataSource extends BaseDataSource { + constructor(scope: Construct, id: string, props: NoneDataSourceProps) { + super(scope, id, props, { + type: 'NONE', + }); + } +} + +/** + * Properties for an AppSync DynamoDB datasource + */ +export interface DynamoDbDataSourceProps extends BackedDataSourceProps { + /** + * The DynamoDB table backing this data source + * [disable-awslint:ref-via-interface] + */ + readonly table: ITable; + /** + * Specify whether this DS is read only or has read and write permissions to the DynamoDB table + * + * @default false + */ + readonly readOnlyAccess?: boolean; + /** + * use credentials of caller to access DynamoDB + * + * @default false + */ + readonly useCallerCredentials?: boolean; +} + +/** + * An AppSync datasource backed by a DynamoDB table + */ +export class DynamoDbDataSource extends BackedDataSource { + constructor(scope: Construct, id: string, props: DynamoDbDataSourceProps) { + super(scope, id, props, { + type: 'AMAZON_DYNAMODB', + dynamoDbConfig: { + tableName: props.table.tableName, + awsRegion: props.table.stack.region, + useCallerCredentials: props.useCallerCredentials, + }, + }); + if (props.readOnlyAccess) { + props.table.grantReadData(this); + } else { + props.table.grantReadWriteData(this); + } + } +} + +/** + * Properties for an AppSync http datasource + */ 
+export interface HttpDataSourceProps extends BaseDataSourceProps { + /** + * The http endpoint + */ + readonly endpoint: string; +} + +/** + * An AppSync datasource backed by a http endpoint + */ +export class HttpDataSource extends BaseDataSource { + constructor(scope: Construct, id: string, props: HttpDataSourceProps) { + super(scope, id, props, { + httpConfig: { + endpoint: props.endpoint, + }, + type: 'HTTP', + }); + } +} + +/** + * Properties for an AppSync Lambda datasource + */ +export interface LambdaDataSourceProps extends BackedDataSourceProps { + /** + * The Lambda function to call to interact with this data source + */ + readonly lambdaFunction: IFunction; +} + +/** + * An AppSync datasource backed by a Lambda function + */ +export class LambdaDataSource extends BackedDataSource { + constructor(scope: Construct, id: string, props: LambdaDataSourceProps) { + super(scope, id, props, { + type: 'AWS_LAMBDA', + lambdaConfig: { + lambdaFunctionArn: props.lambdaFunction.functionArn, + }, + }); + props.lambdaFunction.grantInvoke(this); + } +} \ No newline at end of file diff --git a/packages/@aws-cdk/aws-appsync/lib/graphqlapi.ts b/packages/@aws-cdk/aws-appsync/lib/graphqlapi.ts index bbc92625c5749..e5683ef98d9d6 100644 --- a/packages/@aws-cdk/aws-appsync/lib/graphqlapi.ts +++ b/packages/@aws-cdk/aws-appsync/lib/graphqlapi.ts @@ -1,9 +1,6 @@ import { IUserPool } from '@aws-cdk/aws-cognito'; -import { Table } from '@aws-cdk/aws-dynamodb'; +import { ITable } from '@aws-cdk/aws-dynamodb'; import { - IGrantable, - IPrincipal, - IRole, ManagedPolicy, Role, ServicePrincipal, @@ -13,11 +10,10 @@ import { Construct, Duration, IResolvable } from '@aws-cdk/core'; import { readFileSync } from 'fs'; import { CfnApiKey, - CfnDataSource, CfnGraphQLApi, CfnGraphQLSchema, - CfnResolver, } from './appsync.generated'; +import { DynamoDbDataSource, HttpDataSource, LambdaDataSource, NoneDataSource } from './data-source'; /** * enum with all possible values for AppSync 
authorization type @@ -393,7 +389,7 @@ export class GraphQLApi extends Construct { public addDynamoDbDataSource( name: string, description: string, - table: Table, + table: ITable, ): DynamoDbDataSource { return new DynamoDbDataSource(this, `${name}DS`, { api: this, @@ -570,817 +566,3 @@ export class GraphQLApi extends Construct { return authModes ? this.formatAdditionalAuthorizationModes(authModes) : undefined; } } - -/** - * Base properties for an AppSync datasource - */ -export interface BaseDataSourceProps { - /** - * The API to attach this data source to - */ - readonly api: GraphQLApi; - /** - * The name of the data source - */ - readonly name: string; - /** - * the description of the data source - * - * @default - None - */ - readonly description?: string; -} - -/** - * properties for an AppSync datasource backed by a resource - */ -export interface BackedDataSourceProps extends BaseDataSourceProps { - /** - * The IAM service role to be assumed by AppSync to interact with the data source - * - * @default - Create a new role - */ - readonly serviceRole?: IRole; -} - -/** - * props used by implementations of BaseDataSource to provide configuration. Should not be used directly. 
- */ -export interface ExtendedDataSourceProps { - /** - * the type of the AppSync datasource - */ - readonly type: string; - /** - * configuration for DynamoDB Datasource - * - * @default - No config - */ - readonly dynamoDbConfig?: CfnDataSource.DynamoDBConfigProperty | IResolvable; - /** - * configuration for Elasticsearch Datasource - * - * @default - No config - */ - readonly elasticsearchConfig?: CfnDataSource.ElasticsearchConfigProperty | IResolvable; - /** - * configuration for HTTP Datasource - * - * @default - No config - */ - readonly httpConfig?: CfnDataSource.HttpConfigProperty | IResolvable; - /** - * configuration for Lambda Datasource - * - * @default - No config - */ - readonly lambdaConfig?: CfnDataSource.LambdaConfigProperty | IResolvable; - /** - * configuration for RDS Datasource - * - * @default - No config - */ - readonly relationalDatabaseConfig?: CfnDataSource.RelationalDatabaseConfigProperty | IResolvable; -} - -/** - * Abstract AppSync datasource implementation. 
Do not use directly but use subclasses for concrete datasources - */ -export abstract class BaseDataSource extends Construct { - /** - * the name of the data source - */ - public readonly name: string; - /** - * the underlying CFN data source resource - */ - public readonly ds: CfnDataSource; - - protected api: GraphQLApi; - protected serviceRole?: IRole; - - constructor(scope: Construct, id: string, props: BackedDataSourceProps, extended: ExtendedDataSourceProps) { - super(scope, id); - - if (extended.type !== 'NONE') { - this.serviceRole = props.serviceRole || new Role(this, 'ServiceRole', { assumedBy: new ServicePrincipal('appsync') }); - } - - this.ds = new CfnDataSource(this, 'Resource', { - apiId: props.api.apiId, - name: props.name, - description: props.description, - serviceRoleArn: this.serviceRole?.roleArn, - ...extended, - }); - this.name = props.name; - this.api = props.api; - } - - /** - * creates a new resolver for this datasource and API using the given properties - */ - public createResolver(props: BaseResolverProps): Resolver { - return new Resolver(this, `${props.typeName}${props.fieldName}Resolver`, { - api: this.api, - dataSource: this, - ...props, - }); - } -} - -/** - * Abstract AppSync datasource implementation. 
Do not use directly but use subclasses for resource backed datasources - */ -export abstract class BackedDataSource extends BaseDataSource implements IGrantable { - /** - * the principal of the data source to be IGrantable - */ - public readonly grantPrincipal: IPrincipal; - - constructor(scope: Construct, id: string, props: BackedDataSourceProps, extended: ExtendedDataSourceProps) { - super(scope, id, props, extended); - - this.grantPrincipal = this.serviceRole!; - } -} - -/** - * Properties for an AppSync dummy datasource - */ -export interface NoneDataSourceProps extends BaseDataSourceProps { -} - -/** - * An AppSync dummy datasource - */ -export class NoneDataSource extends BaseDataSource { - constructor(scope: Construct, id: string, props: NoneDataSourceProps) { - super(scope, id, props, { - type: 'NONE', - }); - } -} - -/** - * Properties for an AppSync DynamoDB datasource - */ -export interface DynamoDbDataSourceProps extends BackedDataSourceProps { - /** - * The DynamoDB table backing this data source - * [disable-awslint:ref-via-interface] - */ - readonly table: Table; - /** - * Specify whether this DS is read only or has read and write permissions to the DynamoDB table - * - * @default false - */ - readonly readOnlyAccess?: boolean; - /** - * use credentials of caller to access DynamoDB - * - * @default false - */ - readonly useCallerCredentials?: boolean; -} - -/** - * An AppSync datasource backed by a DynamoDB table - */ -export class DynamoDbDataSource extends BackedDataSource { - constructor(scope: Construct, id: string, props: DynamoDbDataSourceProps) { - super(scope, id, props, { - type: 'AMAZON_DYNAMODB', - dynamoDbConfig: { - tableName: props.table.tableName, - awsRegion: props.table.stack.region, - useCallerCredentials: props.useCallerCredentials, - }, - }); - if (props.readOnlyAccess) { - props.table.grantReadData(this); - } else { - props.table.grantReadWriteData(this); - } - } -} - -/** - * Properties for an AppSync http datasource - */ 
-export interface HttpDataSourceProps extends BaseDataSourceProps { - /** - * The http endpoint - */ - readonly endpoint: string; -} - -/** - * An AppSync datasource backed by a http endpoint - */ -export class HttpDataSource extends BaseDataSource { - constructor(scope: Construct, id: string, props: HttpDataSourceProps) { - super(scope, id, props, { - httpConfig: { - endpoint: props.endpoint, - }, - type: 'HTTP', - }); - } -} - -/** - * Properties for an AppSync Lambda datasource - */ -export interface LambdaDataSourceProps extends BackedDataSourceProps { - /** - * The Lambda function to call to interact with this data source - */ - readonly lambdaFunction: IFunction; -} - -/** - * An AppSync datasource backed by a Lambda function - */ -export class LambdaDataSource extends BackedDataSource { - constructor(scope: Construct, id: string, props: LambdaDataSourceProps) { - super(scope, id, props, { - type: 'AWS_LAMBDA', - lambdaConfig: { - lambdaFunctionArn: props.lambdaFunction.functionArn, - }, - }); - props.lambdaFunction.grantInvoke(this); - } -} - -function concatAndDedup(left: T[], right: T[]): T[] { - return left.concat(right).filter((elem, index, self) => { - return index === self.indexOf(elem); - }); -} - -/** - * Utility class to represent DynamoDB key conditions. 
- */ -abstract class BaseKeyCondition { - public and(cond: BaseKeyCondition): BaseKeyCondition { - return new (class extends BaseKeyCondition { - constructor(private readonly left: BaseKeyCondition, private readonly right: BaseKeyCondition) { - super(); - } - - public renderCondition(): string { - return `${this.left.renderCondition()} AND ${this.right.renderCondition()}`; - } - - public keyNames(): string[] { - return concatAndDedup(this.left.keyNames(), this.right.keyNames()); - } - - public args(): string[] { - return concatAndDedup(this.left.args(), this.right.args()); - } - })(this, cond); - } - - public renderExpressionNames(): string { - return this.keyNames() - .map((keyName: string) => { - return `"#${keyName}" : "${keyName}"`; - }) - .join(', '); - } - - public renderExpressionValues(): string { - return this.args() - .map((arg: string) => { - return `":${arg}" : $util.dynamodb.toDynamoDBJson($ctx.args.${arg})`; - }) - .join(', '); - } - - public abstract renderCondition(): string; - public abstract keyNames(): string[]; - public abstract args(): string[]; -} - -/** - * Utility class to represent DynamoDB "begins_with" key conditions. - */ -class BeginsWith extends BaseKeyCondition { - constructor(private readonly keyName: string, private readonly arg: string) { - super(); - } - - public renderCondition(): string { - return `begins_with(#${this.keyName}, :${this.arg})`; - } - - public keyNames(): string[] { - return [this.keyName]; - } - - public args(): string[] { - return [this.arg]; - } -} - -/** - * Utility class to represent DynamoDB binary key conditions. 
- */ -class BinaryCondition extends BaseKeyCondition { - constructor(private readonly keyName: string, private readonly op: string, private readonly arg: string) { - super(); - } - - public renderCondition(): string { - return `#${this.keyName} ${this.op} :${this.arg}`; - } - - public keyNames(): string[] { - return [this.keyName]; - } - - public args(): string[] { - return [this.arg]; - } -} - -/** - * Utility class to represent DynamoDB "between" key conditions. - */ -class Between extends BaseKeyCondition { - constructor(private readonly keyName: string, private readonly arg1: string, private readonly arg2: string) { - super(); - } - - public renderCondition(): string { - return `#${this.keyName} BETWEEN :${this.arg1} AND :${this.arg2}`; - } - - public keyNames(): string[] { - return [this.keyName]; - } - - public args(): string[] { - return [this.arg1, this.arg2]; - } -} - -/** - * Factory class for DynamoDB key conditions. - */ -export class KeyCondition { - - /** - * Condition k = arg, true if the key attribute k is equal to the Query argument - */ - public static eq(keyName: string, arg: string): KeyCondition { - return new KeyCondition(new BinaryCondition(keyName, '=', arg)); - } - - /** - * Condition k < arg, true if the key attribute k is less than the Query argument - */ - public static lt(keyName: string, arg: string): KeyCondition { - return new KeyCondition(new BinaryCondition(keyName, '<', arg)); - } - - /** - * Condition k <= arg, true if the key attribute k is less than or equal to the Query argument - */ - public static le(keyName: string, arg: string): KeyCondition { - return new KeyCondition(new BinaryCondition(keyName, '<=', arg)); - } - - /** - * Condition k > arg, true if the key attribute k is greater than the the Query argument - */ - public static gt(keyName: string, arg: string): KeyCondition { - return new KeyCondition(new BinaryCondition(keyName, '>', arg)); - } - - /** - * Condition k >= arg, true if the key attribute k is greater or 
equal to the Query argument - */ - public static ge(keyName: string, arg: string): KeyCondition { - return new KeyCondition(new BinaryCondition(keyName, '>=', arg)); - } - - /** - * Condition (k, arg). True if the key attribute k begins with the Query argument. - */ - public static beginsWith(keyName: string, arg: string): KeyCondition { - return new KeyCondition(new BeginsWith(keyName, arg)); - } - - /** - * Condition k BETWEEN arg1 AND arg2, true if k >= arg1 and k <= arg2. - */ - public static between(keyName: string, arg1: string, arg2: string): KeyCondition { - return new KeyCondition(new Between(keyName, arg1, arg2)); - } - - private constructor(private readonly cond: BaseKeyCondition) { } - - /** - * Conjunction between two conditions. - */ - public and(keyCond: KeyCondition): KeyCondition { - return new KeyCondition(this.cond.and(keyCond.cond)); - } - - /** - * Renders the key condition to a VTL string. - */ - public renderTemplate(): string { - return `"query" : { - "expression" : "${this.cond.renderCondition()}", - "expressionNames" : { - ${this.cond.renderExpressionNames()} - }, - "expressionValues" : { - ${this.cond.renderExpressionValues()} - } - }`; - } -} - -/** - * Utility class representing the assigment of a value to an attribute. - */ -export class Assign { - constructor(private readonly attr: string, private readonly arg: string) { } - - /** - * Renders the assignment as a VTL string. - */ - public renderAsAssignment(): string { - return `"${this.attr}" : $util.dynamodb.toDynamoDBJson(${this.arg})`; - } - - /** - * Renders the assignment as a map element. - */ - public putInMap(map: string): string { - return `$util.qr($${map}.put("${this.attr}", ${this.arg}))`; - } -} - -/** - * Utility class to allow assigning a value or an auto-generated id - * to a partition key. - */ -export class PartitionKeyStep { - constructor(private readonly key: string) { } - - /** - * Assign an auto-generated value to the partition key. 
- */ - public is(val: string): PartitionKey { - return new PartitionKey(new Assign(this.key, `$ctx.args.${val}`)); - } - - /** - * Assign an auto-generated value to the partition key. - */ - public auto(): PartitionKey { - return new PartitionKey(new Assign(this.key, '$util.autoId()')); - } -} - -/** - * Utility class to allow assigning a value or an auto-generated id - * to a sort key. - */ -export class SortKeyStep { - constructor(private readonly pkey: Assign, private readonly skey: string) { } - - /** - * Assign an auto-generated value to the sort key. - */ - public is(val: string): PrimaryKey { - return new PrimaryKey(this.pkey, new Assign(this.skey, `$ctx.args.${val}`)); - } - - /** - * Assign an auto-generated value to the sort key. - */ - public auto(): PrimaryKey { - return new PrimaryKey(this.pkey, new Assign(this.skey, '$util.autoId()')); - } -} - -/** - * Specifies the assignment to the primary key. It either - * contains the full primary key or only the partition key. - */ -export class PrimaryKey { - /** - * Allows assigning a value to the partition key. - */ - public static partition(key: string): PartitionKeyStep { - return new PartitionKeyStep(key); - } - - constructor(protected readonly pkey: Assign, private readonly skey?: Assign) { } - - /** - * Renders the key assignment to a VTL string. - */ - public renderTemplate(): string { - const assignments = [this.pkey.renderAsAssignment()]; - if (this.skey) { - assignments.push(this.skey.renderAsAssignment()); - } - return `"key" : { - ${assignments.join(',')} - }`; - } -} - -/** - * Specifies the assignment to the partition key. It can be - * enhanced with the assignment of the sort key. - */ -export class PartitionKey extends PrimaryKey { - constructor(pkey: Assign) { - super(pkey); - } - - /** - * Allows assigning a value to the sort key. - */ - public sort(key: string): SortKeyStep { - return new SortKeyStep(this.pkey, key); - } -} - -/** - * Specifies the attribute value assignments. 
- */ -export class AttributeValues { - constructor(private readonly container: string, private readonly assignments: Assign[] = []) { } - - /** - * Allows assigning a value to the specified attribute. - */ - public attribute(attr: string): AttributeValuesStep { - return new AttributeValuesStep(attr, this.container, this.assignments); - } - - /** - * Renders the variables required for `renderTemplate`. - */ - public renderVariables(): string { - return `#set($input = ${this.container}) - ${this.assignments.map(a => a.putInMap('input')).join('\n')}`; - } - - /** - * Renders the attribute value assingments to a VTL string. - */ - public renderTemplate(): string { - return '"attributeValues": $util.dynamodb.toMapValuesJson($input)'; - } -} - -/** - * Utility class to allow assigning a value to an attribute. - */ -export class AttributeValuesStep { - constructor(private readonly attr: string, private readonly container: string, private readonly assignments: Assign[]) { } - - /** - * Assign the value to the current attribute. - */ - public is(val: string): AttributeValues { - this.assignments.push(new Assign(this.attr, val)); - return new AttributeValues(this.container, this.assignments); - } -} - -/** - * Factory class for attribute value assignments. - */ -export class Values { - /** - * Treats the specified object as a map of assignments, where the property - * names represent attribute names. It’s opinionated about how it represents - * some of the nested objects: e.g., it will use lists (“L”) rather than sets - * (“SS”, “NS”, “BS”). By default it projects the argument container ("$ctx.args"). - */ - public static projecting(arg?: string): AttributeValues { - return new AttributeValues('$ctx.args' + (arg ? `.${arg}` : '')); - } - - /** - * Allows assigning a value to the specified attribute. 
- */ - public static attribute(attr: string): AttributeValuesStep { - return new AttributeValues('{}').attribute(attr); - } -} - -/** - * MappingTemplates for AppSync resolvers - */ -export abstract class MappingTemplate { - - /** - * Create a mapping template from the given string - */ - public static fromString(template: string): MappingTemplate { - return new StringMappingTemplate(template); - } - - /** - * Create a mapping template from the given file - */ - public static fromFile(fileName: string): MappingTemplate { - return new StringMappingTemplate(readFileSync(fileName).toString('UTF-8')); - } - - /** - * Mapping template for a result list from DynamoDB - */ - public static dynamoDbResultList(): MappingTemplate { - return this.fromString('$util.toJson($ctx.result.items)'); - } - - /** - * Mapping template for a single result item from DynamoDB - */ - public static dynamoDbResultItem(): MappingTemplate { - return this.fromString('$util.toJson($ctx.result)'); - } - - /** - * Mapping template to scan a DynamoDB table to fetch all entries - */ - public static dynamoDbScanTable(): MappingTemplate { - return this.fromString('{"version" : "2017-02-28", "operation" : "Scan"}'); - } - - /** - * Mapping template to query a set of items from a DynamoDB table - * - * @param cond the key condition for the query - */ - public static dynamoDbQuery(cond: KeyCondition): MappingTemplate { - return this.fromString(`{"version" : "2017-02-28", "operation" : "Query", ${cond.renderTemplate()}}`); - } - - /** - * Mapping template to get a single item from a DynamoDB table - * - * @param keyName the name of the hash key field - * @param idArg the name of the Query argument - */ - public static dynamoDbGetItem(keyName: string, idArg: string): MappingTemplate { - return this.fromString(`{"version": "2017-02-28", "operation": "GetItem", "key": {"${keyName}": $util.dynamodb.toDynamoDBJson($ctx.args.${idArg})}}`); - } - - /** - * Mapping template to delete a single item from a DynamoDB 
table - * - * @param keyName the name of the hash key field - * @param idArg the name of the Mutation argument - */ - public static dynamoDbDeleteItem(keyName: string, idArg: string): MappingTemplate { - return this.fromString(`{"version": "2017-02-28", "operation": "DeleteItem", "key": {"${keyName}": $util.dynamodb.toDynamoDBJson($ctx.args.${idArg})}}`); - } - - /** - * Mapping template to save a single item to a DynamoDB table - * - * @param key the assigment of Mutation values to the primary key - * @param values the assignment of Mutation values to the table attributes - */ - public static dynamoDbPutItem(key: PrimaryKey, values: AttributeValues): MappingTemplate { - return this.fromString(` - ${values.renderVariables()} - { - "version": "2017-02-28", - "operation": "PutItem", - ${key.renderTemplate()}, - ${values.renderTemplate()} - }`); - } - - /** - * Mapping template to invoke a Lambda function - * - * @param payload the VTL template snippet of the payload to send to the lambda. 
- * If no payload is provided all available context fields are sent to the Lambda function - */ - public static lambdaRequest(payload: string = '$util.toJson($ctx)'): MappingTemplate { - return this.fromString(`{"version": "2017-02-28", "operation": "Invoke", "payload": ${payload}}`); - } - - /** - * Mapping template to return the Lambda result to the caller - */ - public static lambdaResult(): MappingTemplate { - return this.fromString('$util.toJson($ctx.result)'); - } - - /** - * this is called to render the mapping template to a VTL string - */ - public abstract renderTemplate(): string; - -} - -class StringMappingTemplate extends MappingTemplate { - - constructor(private readonly template: string) { - super(); - } - - public renderTemplate() { - return this.template; - } -} - -/** - * Basic properties for an AppSync resolver - */ -export interface BaseResolverProps { - /** - * name of the GraphQL type this resolver is attached to - */ - readonly typeName: string; - /** - * name of the GraphQL fiel din the given type this resolver is attached to - */ - readonly fieldName: string; - /** - * configuration of the pipeline resolver - * - * @default - create a UNIT resolver - */ - readonly pipelineConfig?: CfnResolver.PipelineConfigProperty | IResolvable; - /** - * The request mapping template for this resolver - * - * @default - No mapping template - */ - readonly requestMappingTemplate?: MappingTemplate; - /** - * The response mapping template for this resolver - * - * @default - No mapping template - */ - readonly responseMappingTemplate?: MappingTemplate; -} - -/** - * Additional properties for an AppSync resolver like GraphQL API reference and datasource - */ -export interface ResolverProps extends BaseResolverProps { - /** - * The API this resolver is attached to - */ - readonly api: GraphQLApi; - /** - * The data source this resolver is using - * - * @default - No datasource - */ - readonly dataSource?: BaseDataSource; -} - -/** - * An AppSync resolver - */ 
-export class Resolver extends Construct { - - /** - * the ARN of the resolver - */ - public readonly arn: string; - - private resolver: CfnResolver; - - constructor(scope: Construct, id: string, props: ResolverProps) { - super(scope, id); - - this.resolver = new CfnResolver(this, 'Resource', { - apiId: props.api.apiId, - typeName: props.typeName, - fieldName: props.fieldName, - dataSourceName: props.dataSource ? props.dataSource.name : undefined, - kind: props.pipelineConfig ? 'PIPELINE' : 'UNIT', - requestMappingTemplate: props.requestMappingTemplate ? props.requestMappingTemplate.renderTemplate() : undefined, - responseMappingTemplate: props.responseMappingTemplate ? props.responseMappingTemplate.renderTemplate() : undefined, - }); - this.resolver.addDependsOn(props.api.schema); - if (props.dataSource) { - this.resolver.addDependsOn(props.dataSource.ds); - } - this.arn = this.resolver.attrResolverArn; - } -} diff --git a/packages/@aws-cdk/aws-appsync/lib/index.ts b/packages/@aws-cdk/aws-appsync/lib/index.ts index 9df17ffda669d..852cbed1c54f6 100644 --- a/packages/@aws-cdk/aws-appsync/lib/index.ts +++ b/packages/@aws-cdk/aws-appsync/lib/index.ts @@ -1,3 +1,7 @@ // AWS::AppSync CloudFormation Resources: export * from './appsync.generated'; +export * from './key'; +export * from './data-source'; +export * from './mapping-template'; +export * from './resolver'; export * from './graphqlapi'; diff --git a/packages/@aws-cdk/aws-appsync/lib/key.ts b/packages/@aws-cdk/aws-appsync/lib/key.ts new file mode 100644 index 0000000000000..7295a8d6bd032 --- /dev/null +++ b/packages/@aws-cdk/aws-appsync/lib/key.ts @@ -0,0 +1,256 @@ +import { BaseKeyCondition, BeginsWith, Between, BinaryCondition } from './private'; + +/** + * Factory class for DynamoDB key conditions. 
+ */ +export class KeyCondition { + + /** + * Condition k = arg, true if the key attribute k is equal to the Query argument + */ + public static eq(keyName: string, arg: string): KeyCondition { + return new KeyCondition(new BinaryCondition(keyName, '=', arg)); + } + + /** + * Condition k < arg, true if the key attribute k is less than the Query argument + */ + public static lt(keyName: string, arg: string): KeyCondition { + return new KeyCondition(new BinaryCondition(keyName, '<', arg)); + } + + /** + * Condition k <= arg, true if the key attribute k is less than or equal to the Query argument + */ + public static le(keyName: string, arg: string): KeyCondition { + return new KeyCondition(new BinaryCondition(keyName, '<=', arg)); + } + + /** + * Condition k > arg, true if the key attribute k is greater than the Query argument + */ + public static gt(keyName: string, arg: string): KeyCondition { + return new KeyCondition(new BinaryCondition(keyName, '>', arg)); + } + + /** + * Condition k >= arg, true if the key attribute k is greater or equal to the Query argument + */ + public static ge(keyName: string, arg: string): KeyCondition { + return new KeyCondition(new BinaryCondition(keyName, '>=', arg)); + } + + /** + * Condition (k, arg). True if the key attribute k begins with the Query argument. + */ + public static beginsWith(keyName: string, arg: string): KeyCondition { + return new KeyCondition(new BeginsWith(keyName, arg)); + } + + /** + * Condition k BETWEEN arg1 AND arg2, true if k >= arg1 and k <= arg2. + */ + public static between(keyName: string, arg1: string, arg2: string): KeyCondition { + return new KeyCondition(new Between(keyName, arg1, arg2)); + } + + private constructor(private readonly cond: BaseKeyCondition) { } + + /** + * Conjunction between two conditions. + */ + public and(keyCond: KeyCondition): KeyCondition { + return new KeyCondition(this.cond.and(keyCond.cond)); + } + + /** + * Renders the key condition to a VTL string. 
+ */ + public renderTemplate(): string { + return `"query" : { + "expression" : "${this.cond.renderCondition()}", + "expressionNames" : { + ${this.cond.renderExpressionNames()} + }, + "expressionValues" : { + ${this.cond.renderExpressionValues()} + } + }`; + } +} + +/** + * Utility class representing the assignment of a value to an attribute. + */ +export class Assign { + constructor(private readonly attr: string, private readonly arg: string) { } + + /** + * Renders the assignment as a VTL string. + */ + public renderAsAssignment(): string { + return `"${this.attr}" : $util.dynamodb.toDynamoDBJson(${this.arg})`; + } + + /** + * Renders the assignment as a map element. + */ + public putInMap(map: string): string { + return `$util.qr($${map}.put("${this.attr}", ${this.arg}))`; + } +} + +/** + * Utility class to allow assigning a value or an auto-generated id + * to a partition key. + */ +export class PartitionKeyStep { + constructor(private readonly key: string) { } + + /** + * Assign a value to the partition key. + */ + public is(val: string): PartitionKey { + return new PartitionKey(new Assign(this.key, `$ctx.args.${val}`)); + } + + /** + * Assign an auto-generated value to the partition key. + */ + public auto(): PartitionKey { + return new PartitionKey(new Assign(this.key, '$util.autoId()')); + } +} + +/** + * Utility class to allow assigning a value or an auto-generated id + * to a sort key. + */ +export class SortKeyStep { + constructor(private readonly pkey: Assign, private readonly skey: string) { } + + /** + * Assign a value to the sort key. + */ + public is(val: string): PrimaryKey { + return new PrimaryKey(this.pkey, new Assign(this.skey, `$ctx.args.${val}`)); + } + + /** + * Assign an auto-generated value to the sort key. + */ + public auto(): PrimaryKey { + return new PrimaryKey(this.pkey, new Assign(this.skey, '$util.autoId()')); + } +} + +/** + * Specifies the assignment to the primary key. 
It either + * contains the full primary key or only the partition key. + */ +export class PrimaryKey { + /** + * Allows assigning a value to the partition key. + */ + public static partition(key: string): PartitionKeyStep { + return new PartitionKeyStep(key); + } + + constructor(protected readonly pkey: Assign, private readonly skey?: Assign) { } + + /** + * Renders the key assignment to a VTL string. + */ + public renderTemplate(): string { + const assignments = [this.pkey.renderAsAssignment()]; + if (this.skey) { + assignments.push(this.skey.renderAsAssignment()); + } + return `"key" : { + ${assignments.join(',')} + }`; + } +} + +/** + * Specifies the assignment to the partition key. It can be + * enhanced with the assignment of the sort key. + */ +export class PartitionKey extends PrimaryKey { + constructor(pkey: Assign) { + super(pkey); + } + + /** + * Allows assigning a value to the sort key. + */ + public sort(key: string): SortKeyStep { + return new SortKeyStep(this.pkey, key); + } +} + +/** + * Specifies the attribute value assignments. + */ +export class AttributeValues { + constructor(private readonly container: string, private readonly assignments: Assign[] = []) { } + + /** + * Allows assigning a value to the specified attribute. + */ + public attribute(attr: string): AttributeValuesStep { + return new AttributeValuesStep(attr, this.container, this.assignments); + } + + /** + * Renders the variables required for `renderTemplate`. + */ + public renderVariables(): string { + return `#set($input = ${this.container}) + ${this.assignments.map(a => a.putInMap('input')).join('\n')}`; + } + + /** + * Renders the attribute value assignments to a VTL string. + */ + public renderTemplate(): string { + return '"attributeValues": $util.dynamodb.toMapValuesJson($input)'; + } +} + +/** + * Utility class to allow assigning a value to an attribute. 
+ */ +export class AttributeValuesStep { + constructor(private readonly attr: string, private readonly container: string, private readonly assignments: Assign[]) { } + + /** + * Assign the value to the current attribute. + */ + public is(val: string): AttributeValues { + this.assignments.push(new Assign(this.attr, val)); + return new AttributeValues(this.container, this.assignments); + } +} + +/** + * Factory class for attribute value assignments. + */ +export class Values { + /** + * Treats the specified object as a map of assignments, where the property + * names represent attribute names. It’s opinionated about how it represents + * some of the nested objects: e.g., it will use lists (“L”) rather than sets + * (“SS”, “NS”, “BS”). By default it projects the argument container ("$ctx.args"). + */ + public static projecting(arg?: string): AttributeValues { + return new AttributeValues('$ctx.args' + (arg ? `.${arg}` : '')); + } + + /** + * Allows assigning a value to the specified attribute. 
+ */ + public static attribute(attr: string): AttributeValuesStep { + return new AttributeValues('{}').attribute(attr); + } +} \ No newline at end of file diff --git a/packages/@aws-cdk/aws-appsync/lib/mapping-template.ts b/packages/@aws-cdk/aws-appsync/lib/mapping-template.ts new file mode 100644 index 0000000000000..d4c0011c54342 --- /dev/null +++ b/packages/@aws-cdk/aws-appsync/lib/mapping-template.ts @@ -0,0 +1,121 @@ +import { readFileSync } from 'fs'; +import { AttributeValues, KeyCondition, PrimaryKey } from './key'; + +/** + * MappingTemplates for AppSync resolvers + */ +export abstract class MappingTemplate { + /** + * Create a mapping template from the given string + */ + public static fromString(template: string): MappingTemplate { + return new StringMappingTemplate(template); + } + + /** + * Create a mapping template from the given file + */ + public static fromFile(fileName: string): MappingTemplate { + return new StringMappingTemplate(readFileSync(fileName).toString('UTF-8')); + } + + /** + * Mapping template for a result list from DynamoDB + */ + public static dynamoDbResultList(): MappingTemplate { + return this.fromString('$util.toJson($ctx.result.items)'); + } + + /** + * Mapping template for a single result item from DynamoDB + */ + public static dynamoDbResultItem(): MappingTemplate { + return this.fromString('$util.toJson($ctx.result)'); + } + + /** + * Mapping template to scan a DynamoDB table to fetch all entries + */ + public static dynamoDbScanTable(): MappingTemplate { + return this.fromString('{"version" : "2017-02-28", "operation" : "Scan"}'); + } + + /** + * Mapping template to query a set of items from a DynamoDB table + * + * @param cond the key condition for the query + */ + public static dynamoDbQuery(cond: KeyCondition): MappingTemplate { + return this.fromString(`{"version" : "2017-02-28", "operation" : "Query", ${cond.renderTemplate()}}`); + } + + /** + * Mapping template to get a single item from a DynamoDB table + * + * @param 
keyName the name of the hash key field + * @param idArg the name of the Query argument + */ + public static dynamoDbGetItem(keyName: string, idArg: string): MappingTemplate { + return this.fromString(`{"version": "2017-02-28", "operation": "GetItem", "key": {"${keyName}": $util.dynamodb.toDynamoDBJson($ctx.args.${idArg})}}`); + } + + /** + * Mapping template to delete a single item from a DynamoDB table + * + * @param keyName the name of the hash key field + * @param idArg the name of the Mutation argument + */ + public static dynamoDbDeleteItem(keyName: string, idArg: string): MappingTemplate { + return this.fromString(`{"version": "2017-02-28", "operation": "DeleteItem", "key": {"${keyName}": $util.dynamodb.toDynamoDBJson($ctx.args.${idArg})}}`); + } + + /** + * Mapping template to save a single item to a DynamoDB table + * + * @param key the assignment of Mutation values to the primary key + * @param values the assignment of Mutation values to the table attributes + */ + public static dynamoDbPutItem(key: PrimaryKey, values: AttributeValues): MappingTemplate { + return this.fromString(` + ${values.renderVariables()} + { + "version": "2017-02-28", + "operation": "PutItem", + ${key.renderTemplate()}, + ${values.renderTemplate()} + }`); + } + + /** + * Mapping template to invoke a Lambda function + * + * @param payload the VTL template snippet of the payload to send to the lambda. 
+ * If no payload is provided all available context fields are sent to the Lambda function + */ + public static lambdaRequest(payload: string = '$util.toJson($ctx)'): MappingTemplate { + return this.fromString(`{"version": "2017-02-28", "operation": "Invoke", "payload": ${payload}}`); + } + + /** + * Mapping template to return the Lambda result to the caller + */ + public static lambdaResult(): MappingTemplate { + return this.fromString('$util.toJson($ctx.result)'); + } + + /** + * this is called to render the mapping template to a VTL string + */ + public abstract renderTemplate(): string; +} + +class StringMappingTemplate extends MappingTemplate { + + constructor(private readonly template: string) { + super(); + } + + public renderTemplate() { + return this.template; + } +} \ No newline at end of file diff --git a/packages/@aws-cdk/aws-appsync/lib/private.ts b/packages/@aws-cdk/aws-appsync/lib/private.ts new file mode 100644 index 0000000000000..9118b503349c3 --- /dev/null +++ b/packages/@aws-cdk/aws-appsync/lib/private.ts @@ -0,0 +1,113 @@ +function concatAndDedup(left: T[], right: T[]): T[] { + return left.concat(right).filter((elem, index, self) => { + return index === self.indexOf(elem); + }); +} + +/** + * Utility class to represent DynamoDB key conditions. 
+ */ +export abstract class BaseKeyCondition { + public and(cond: BaseKeyCondition): BaseKeyCondition { + return new (class extends BaseKeyCondition { + constructor(private readonly left: BaseKeyCondition, private readonly right: BaseKeyCondition) { + super(); + } + + public renderCondition(): string { + return `${this.left.renderCondition()} AND ${this.right.renderCondition()}`; + } + + public keyNames(): string[] { + return concatAndDedup(this.left.keyNames(), this.right.keyNames()); + } + + public args(): string[] { + return concatAndDedup(this.left.args(), this.right.args()); + } + })(this, cond); + } + + public renderExpressionNames(): string { + return this.keyNames() + .map((keyName: string) => { + return `"#${keyName}" : "${keyName}"`; + }) + .join(', '); + } + + public renderExpressionValues(): string { + return this.args() + .map((arg: string) => { + return `":${arg}" : $util.dynamodb.toDynamoDBJson($ctx.args.${arg})`; + }) + .join(', '); + } + + public abstract renderCondition(): string; + public abstract keyNames(): string[]; + public abstract args(): string[]; +} + +/** + * Utility class to represent DynamoDB "begins_with" key conditions. + */ +export class BeginsWith extends BaseKeyCondition { + constructor(private readonly keyName: string, private readonly arg: string) { + super(); + } + + public renderCondition(): string { + return `begins_with(#${this.keyName}, :${this.arg})`; + } + + public keyNames(): string[] { + return [this.keyName]; + } + + public args(): string[] { + return [this.arg]; + } +} + +/** + * Utility class to represent DynamoDB binary key conditions. 
+ */ +export class BinaryCondition extends BaseKeyCondition { + constructor(private readonly keyName: string, private readonly op: string, private readonly arg: string) { + super(); + } + + public renderCondition(): string { + return `#${this.keyName} ${this.op} :${this.arg}`; + } + + public keyNames(): string[] { + return [this.keyName]; + } + + public args(): string[] { + return [this.arg]; + } +} + +/** + * Utility class to represent DynamoDB "between" key conditions. + */ +export class Between extends BaseKeyCondition { + constructor(private readonly keyName: string, private readonly arg1: string, private readonly arg2: string) { + super(); + } + + public renderCondition(): string { + return `#${this.keyName} BETWEEN :${this.arg1} AND :${this.arg2}`; + } + + public keyNames(): string[] { + return [this.keyName]; + } + + public args(): string[] { + return [this.arg1, this.arg2]; + } +} \ No newline at end of file diff --git a/packages/@aws-cdk/aws-appsync/lib/resolver.ts b/packages/@aws-cdk/aws-appsync/lib/resolver.ts new file mode 100644 index 0000000000000..ef5e524ac75ae --- /dev/null +++ b/packages/@aws-cdk/aws-appsync/lib/resolver.ts @@ -0,0 +1,83 @@ +import { Construct, IResolvable } from '@aws-cdk/core'; +import { CfnResolver } from './appsync.generated'; +import { BaseDataSource } from './data-source'; +import { GraphQLApi } from './graphqlapi'; +import { MappingTemplate } from './mapping-template'; +/** + * Basic properties for an AppSync resolver + */ +export interface BaseResolverProps { + /** + * name of the GraphQL type this resolver is attached to + */ + readonly typeName: string; + /** + * name of the GraphQL fiel din the given type this resolver is attached to + */ + readonly fieldName: string; + /** + * configuration of the pipeline resolver + * + * @default - create a UNIT resolver + */ + readonly pipelineConfig?: CfnResolver.PipelineConfigProperty | IResolvable; + /** + * The request mapping template for this resolver + * + * @default - No 
mapping template + */ + readonly requestMappingTemplate?: MappingTemplate; + /** + * The response mapping template for this resolver + * + * @default - No mapping template + */ + readonly responseMappingTemplate?: MappingTemplate; +} + +/** + * Additional properties for an AppSync resolver like GraphQL API reference and datasource + */ +export interface ResolverProps extends BaseResolverProps { + /** + * The API this resolver is attached to + */ + readonly api: GraphQLApi; + /** + * The data source this resolver is using + * + * @default - No datasource + */ + readonly dataSource?: BaseDataSource; +} + +/** + * An AppSync resolver + */ +export class Resolver extends Construct { + /** + * the ARN of the resolver + */ + public readonly arn: string; + + private resolver: CfnResolver; + + constructor(scope: Construct, id: string, props: ResolverProps) { + super(scope, id); + + this.resolver = new CfnResolver(this, 'Resource', { + apiId: props.api.apiId, + typeName: props.typeName, + fieldName: props.fieldName, + dataSourceName: props.dataSource ? props.dataSource.name : undefined, + kind: props.pipelineConfig ? 'PIPELINE' : 'UNIT', + requestMappingTemplate: props.requestMappingTemplate ? props.requestMappingTemplate.renderTemplate() : undefined, + responseMappingTemplate: props.responseMappingTemplate ? 
props.responseMappingTemplate.renderTemplate() : undefined, + }); + this.resolver.addDependsOn(props.api.schema); + if (props.dataSource) { + this.resolver.addDependsOn(props.dataSource.ds); + } + this.arn = this.resolver.attrResolverArn; + } +} \ No newline at end of file diff --git a/packages/@aws-cdk/aws-appsync/test/integ.graphql.expected.json b/packages/@aws-cdk/aws-appsync/test/integ.graphql.expected.json index 9fe51c84d0b28..41da6184b5089 100644 --- a/packages/@aws-cdk/aws-appsync/test/integ.graphql.expected.json +++ b/packages/@aws-cdk/aws-appsync/test/integ.graphql.expected.json @@ -106,7 +106,7 @@ "ApiId": { "Fn::GetAtt": ["ApiF70053CD", "ApiId"] }, - "Definition": "type ServiceVersion {\n version: String!\n}\n\ntype Customer {\n id: String!\n name: String!\n}\n\ninput SaveCustomerInput {\n name: String!\n}\n\ntype Order {\n customer: String!\n order: String!\n}\n\ntype Query {\n getServiceVersion: ServiceVersion\n getCustomers: [Customer]\n getCustomer(id: String): Customer\n getCustomerOrdersEq(customer: String): Order\n getCustomerOrdersLt(customer: String): Order\n getCustomerOrdersLe(customer: String): Order\n getCustomerOrdersGt(customer: String): Order\n getCustomerOrdersGe(customer: String): Order\n getCustomerOrdersFilter(customer: String, order: String): Order\n getCustomerOrdersBetween(customer: String, order1: String, order2: String): Order\n}\n\ninput FirstOrderInput {\n product: String!\n quantity: Int!\n}\n\ntype Mutation {\n addCustomer(customer: SaveCustomerInput!): Customer\n saveCustomer(id: String!, customer: SaveCustomerInput!): Customer\n removeCustomer(id: String!): Customer\n saveCustomerWithFirstOrder(customer: SaveCustomerInput!, order: FirstOrderInput!, referral: String): Order\n doPostOnAws: String!\n}\n" + "Definition": "type ServiceVersion {\n version: String!\n}\n\ntype Customer {\n id: String!\n name: String!\n}\n\ninput SaveCustomerInput {\n name: String!\n}\n\ntype Order {\n customer: String!\n order: String!\n}\n\ntype 
Payment {\n id: String!\n amount: String!\n}\n\ntype Query {\n getServiceVersion: ServiceVersion\n getCustomers: [Customer]\n getCustomer(id: String): Customer\n getCustomerOrdersEq(customer: String): Order\n getCustomerOrdersLt(customer: String): Order\n getCustomerOrdersLe(customer: String): Order\n getCustomerOrdersGt(customer: String): Order\n getCustomerOrdersGe(customer: String): Order\n getCustomerOrdersFilter(customer: String, order: String): Order\n getCustomerOrdersBetween(customer: String, order1: String, order2: String): Order\n getPayment(id: String): Payment\n}\n\ninput FirstOrderInput {\n product: String!\n quantity: Int!\n}\n\ntype Mutation {\n addCustomer(customer: SaveCustomerInput!): Customer\n saveCustomer(id: String!, customer: SaveCustomerInput!): Customer\n removeCustomer(id: String!): Customer\n saveCustomerWithFirstOrder(customer: SaveCustomerInput!, order: FirstOrderInput!, referral: String): Order\n savePayment(payment: Payment): Payment\n doPostOnAws: String!\n}\n" } }, "ApiNoneDSB4E6495F": { @@ -591,6 +591,147 @@ "ApiSchema510EECD7" ] }, + "ApiPaymentDSServiceRole7A857DD9": { + "Type": "AWS::IAM::Role", + "Properties": { + "AssumeRolePolicyDocument": { + "Statement": [ + { + "Action": "sts:AssumeRole", + "Effect": "Allow", + "Principal": { + "Service": "appsync.amazonaws.com" + } + } + ], + "Version": "2012-10-17" + } + } + }, + "ApiPaymentDSServiceRoleDefaultPolicy1BE875C5": { + "Type": "AWS::IAM::Policy", + "Properties": { + "PolicyDocument": { + "Statement": [ + { + "Action": [ + "dynamodb:BatchGetItem", + "dynamodb:GetRecords", + "dynamodb:GetShardIterator", + "dynamodb:Query", + "dynamodb:GetItem", + "dynamodb:Scan", + "dynamodb:BatchWriteItem", + "dynamodb:PutItem", + "dynamodb:UpdateItem", + "dynamodb:DeleteItem" + ], + "Effect": "Allow", + "Resource": [ + { + "Fn::Join": [ + "", + [ + "arn:", + { + "Ref": "AWS::Partition" + }, + ":dynamodb:", + { + "Ref": "AWS::Region" + }, + ":", + { + "Ref": "AWS::AccountId" + }, + 
":table/PaymentTable" + ] + ] + }, + { + "Ref": "AWS::NoValue" + } + ] + } + ], + "Version": "2012-10-17" + }, + "PolicyName": "ApiPaymentDSServiceRoleDefaultPolicy1BE875C5", + "Roles": [ + { + "Ref": "ApiPaymentDSServiceRole7A857DD9" + } + ] + } + }, + "ApiPaymentDS69022256": { + "Type": "AWS::AppSync::DataSource", + "Properties": { + "ApiId": { + "Fn::GetAtt": [ + "ApiF70053CD", + "ApiId" + ] + }, + "Name": "Payment", + "Type": "AMAZON_DYNAMODB", + "Description": "The payment data source", + "DynamoDBConfig": { + "AwsRegion": { + "Ref": "AWS::Region" + }, + "TableName": "PaymentTable" + }, + "ServiceRoleArn": { + "Fn::GetAtt": [ + "ApiPaymentDSServiceRole7A857DD9", + "Arn" + ] + } + } + }, + "ApiPaymentDSQuerygetPaymentResolver25686F48": { + "Type": "AWS::AppSync::Resolver", + "Properties": { + "ApiId": { + "Fn::GetAtt": [ + "ApiF70053CD", + "ApiId" + ] + }, + "FieldName": "getPayment", + "TypeName": "Query", + "DataSourceName": "Payment", + "Kind": "UNIT", + "RequestMappingTemplate": "{\"version\": \"2017-02-28\", \"operation\": \"GetItem\", \"key\": {\"id\": $util.dynamodb.toDynamoDBJson($ctx.args.id)}}", + "ResponseMappingTemplate": "$util.toJson($ctx.result)" + }, + "DependsOn": [ + "ApiPaymentDS69022256", + "ApiSchema510EECD7" + ] + }, + "ApiPaymentDSMutationsavePaymentResolver08FBC62D": { + "Type": "AWS::AppSync::Resolver", + "Properties": { + "ApiId": { + "Fn::GetAtt": [ + "ApiF70053CD", + "ApiId" + ] + }, + "FieldName": "savePayment", + "TypeName": "Mutation", + "DataSourceName": "Payment", + "Kind": "UNIT", + "RequestMappingTemplate": "\n #set($input = $ctx.args.payment)\n \n {\n \"version\": \"2017-02-28\",\n \"operation\": \"PutItem\",\n \"key\" : {\n \"id\" : $util.dynamodb.toDynamoDBJson($ctx.args.id)\n },\n \"attributeValues\": $util.dynamodb.toMapValuesJson($input)\n }", + "ResponseMappingTemplate": "$util.toJson($ctx.result)" + }, + "DependsOn": [ + "ApiPaymentDS69022256", + "ApiSchema510EECD7" + ] + }, "ApihttpDSServiceRole8B5C9457": { "Type": 
"AWS::IAM::Role", "Properties": { diff --git a/packages/@aws-cdk/aws-appsync/test/integ.graphql.ts b/packages/@aws-cdk/aws-appsync/test/integ.graphql.ts index fb2be9eeac531..f7da8537c351e 100644 --- a/packages/@aws-cdk/aws-appsync/test/integ.graphql.ts +++ b/packages/@aws-cdk/aws-appsync/test/integ.graphql.ts @@ -71,9 +71,11 @@ const orderTable = new Table(stack, 'OrderTable', { }, removalPolicy: RemovalPolicy.DESTROY, }); +const paymentTable = Table.fromTableName(stack, 'PaymentTable', 'PaymentTable'); const customerDS = api.addDynamoDbDataSource('Customer', 'The customer data source', customerTable); const orderDS = api.addDynamoDbDataSource('Order', 'The order data source', orderTable); +const paymentDS = api.addDynamoDbDataSource('Payment', 'The payment data source', paymentTable); customerDS.createResolver({ typeName: 'Query', @@ -148,6 +150,19 @@ orderDS.createResolver({ responseMappingTemplate: MappingTemplate.dynamoDbResultList(), }); +paymentDS.createResolver({ + typeName: 'Query', + fieldName: 'getPayment', + requestMappingTemplate: MappingTemplate.dynamoDbGetItem('id', 'id'), + responseMappingTemplate: MappingTemplate.dynamoDbResultItem(), +}); +paymentDS.createResolver({ + typeName: 'Mutation', + fieldName: 'savePayment', + requestMappingTemplate: MappingTemplate.dynamoDbPutItem(PrimaryKey.partition('id').is('id'), Values.projecting('payment')), + responseMappingTemplate: MappingTemplate.dynamoDbResultItem(), +}); + const httpDS = api.addHttpDataSource('http', 'The http data source', 'https://aws.amazon.com/'); httpDS.createResolver({ diff --git a/packages/@aws-cdk/aws-appsync/test/schema.graphql b/packages/@aws-cdk/aws-appsync/test/schema.graphql index 24af9a154ec59..09a539d883df9 100644 --- a/packages/@aws-cdk/aws-appsync/test/schema.graphql +++ b/packages/@aws-cdk/aws-appsync/test/schema.graphql @@ -16,6 +16,11 @@ type Order { order: String! } +type Payment { + id: String! + amount: String! 
+} + type Query { getServiceVersion: ServiceVersion getCustomers: [Customer] @@ -27,6 +32,7 @@ type Query { getCustomerOrdersGe(customer: String): Order getCustomerOrdersFilter(customer: String, order: String): Order getCustomerOrdersBetween(customer: String, order1: String, order2: String): Order + getPayment(id: String): Payment } input FirstOrderInput { @@ -39,5 +45,6 @@ type Mutation { saveCustomer(id: String!, customer: SaveCustomerInput!): Customer removeCustomer(id: String!): Customer saveCustomerWithFirstOrder(customer: SaveCustomerInput!, order: FirstOrderInput!, referral: String): Order + savePayment(payment: Payment): Payment doPostOnAws: String! } diff --git a/packages/@aws-cdk/aws-autoscaling-common/package.json b/packages/@aws-cdk/aws-autoscaling-common/package.json index ccabb1ecb87e2..ec3b5affa8cc1 100644 --- a/packages/@aws-cdk/aws-autoscaling-common/package.json +++ b/packages/@aws-cdk/aws-autoscaling-common/package.json @@ -62,7 +62,7 @@ "@types/nodeunit": "^0.0.31", "cdk-build-tools": "0.0.0", "cdk-integ-tools": "0.0.0", - "fast-check": "^1.25.1", + "fast-check": "^1.26.0", "nodeunit": "^0.11.3", "pkglint": "0.0.0" }, diff --git a/packages/@aws-cdk/aws-cloudfront/.gitignore b/packages/@aws-cdk/aws-cloudfront/.gitignore index 27e8c6a71e4ff..b97b84d8d9189 100644 --- a/packages/@aws-cdk/aws-cloudfront/.gitignore +++ b/packages/@aws-cdk/aws-cloudfront/.gitignore @@ -13,3 +13,4 @@ nyc.config.js .LAST_PACKAGE *.snk !.eslintrc.js +!jest.config.js diff --git a/packages/@aws-cdk/aws-cloudfront/.npmignore b/packages/@aws-cdk/aws-cloudfront/.npmignore index eb063bd7f38c8..d9a27cdcdce6e 100644 --- a/packages/@aws-cdk/aws-cloudfront/.npmignore +++ b/packages/@aws-cdk/aws-cloudfront/.npmignore @@ -22,4 +22,5 @@ tsconfig.json .eslintrc.js # exclude cdk artifacts -**/cdk.out \ No newline at end of file +**/cdk.out +jest.config.js diff --git a/packages/@aws-cdk/aws-cloudfront/README.md b/packages/@aws-cdk/aws-cloudfront/README.md index 
b0ae2f6f79ba3..04488d3670742 100644 --- a/packages/@aws-cdk/aws-cloudfront/README.md +++ b/packages/@aws-cdk/aws-cloudfront/README.md @@ -80,4 +80,26 @@ new cloudfront.CloudFrontWebDistribution(stack, 'MyDistribution', { //... geoRestriction: GeoRestriction.whitelist('US', 'UK') }); -``` \ No newline at end of file +``` + +### Connection behaviors between CloudFront and your origin. + +CloudFront provides you even more control over the connection behaviors between CloudFront and your origin. You can now configure the number of connection attempts CloudFront will make to your origin and the origin connection timeout for each attempt. + +See [Origin Connection Attempts](https://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/distribution-web-values-specify.html#origin-connection-attempts) + +See [Origin Connection Timeout](https://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/distribution-web-values-specify.html#origin-connection-timeout) + +Example usage: + +```ts +const distribution = new CloudFrontWebDistribution(this, 'MyDistribution', { + originConfigs: [ + { + ..., + connectionAttempts: 3, + connectionTimeout: cdk.Duration.seconds(10), + } + ] +}); +``` diff --git a/packages/@aws-cdk/aws-cloudfront/jest.config.js b/packages/@aws-cdk/aws-cloudfront/jest.config.js new file mode 100644 index 0000000000000..f5d5c4c8ad18f --- /dev/null +++ b/packages/@aws-cdk/aws-cloudfront/jest.config.js @@ -0,0 +1,2 @@ +const baseConfig = require('../../../tools/cdk-build-tools/config/jest.config'); +module.exports = baseConfig; \ No newline at end of file diff --git a/packages/@aws-cdk/aws-cloudfront/lib/web_distribution.ts b/packages/@aws-cdk/aws-cloudfront/lib/web_distribution.ts index ab5eda0eb0834..f6c9c0fe89019 100644 --- a/packages/@aws-cdk/aws-cloudfront/lib/web_distribution.ts +++ b/packages/@aws-cdk/aws-cloudfront/lib/web_distribution.ts @@ -140,15 +140,31 @@ export interface LoggingConfiguration { * One or the other must be passed, and it is 
invalid to pass both in the same SourceConfiguration. */ export interface SourceConfiguration { + /** + * The number of times that CloudFront attempts to connect to the origin. + * You can specify 1, 2, or 3 as the number of attempts. + * + * @default 3 + */ + readonly connectionAttempts?: number; + + /** + * The number of seconds that CloudFront waits when trying to establish a connection to the origin. + * You can specify a number of seconds between 1 and 10 (inclusive). + * + * @default cdk.Duration.seconds(10) + */ + readonly connectionTimeout?: cdk.Duration; + /** * An s3 origin source - if you're using s3 for your assets */ - readonly s3OriginSource?: S3OriginConfig + readonly s3OriginSource?: S3OriginConfig; /** * A custom origin source - for all non-s3 sources. */ - readonly customOriginSource?: CustomOriginConfig, + readonly customOriginSource?: CustomOriginConfig; /** * The behaviors associated with this source. @@ -161,7 +177,7 @@ export interface SourceConfiguration { * * @default / */ - readonly originPath?: string, + readonly originPath?: string; /** * Any additional headers to pass to the origin @@ -771,6 +787,16 @@ export class CloudFrontWebDistribution extends cdk.Construct implements IDistrib } } + const connectionAttempts = originConfig.connectionAttempts ?? 
3; + if (connectionAttempts < 1 || 3 < connectionAttempts || !Number.isInteger(connectionAttempts)) { + throw new Error('connectionAttempts: You can specify 1, 2, or 3 as the number of attempts.'); + } + + const connectionTimeout = (originConfig.connectionTimeout || cdk.Duration.seconds(10)).toSeconds(); + if (connectionTimeout < 1 || 10 < connectionTimeout || !Number.isInteger(connectionTimeout)) { + throw new Error('connectionTimeout: You can specify a number of seconds between 1 and 10 (inclusive).'); + } + const originProperty: CfnDistribution.OriginProperty = { id: originId, domainName: originConfig.s3OriginSource @@ -791,6 +817,8 @@ export class CloudFrontWebDistribution extends cdk.Construct implements IDistrib originSslProtocols: originConfig.customOriginSource.allowedOriginSSLVersions || [OriginSslPolicy.TLS_V1_2], } : undefined, + connectionAttempts, + connectionTimeout, }; for (const behavior of originConfig.behaviors) { diff --git a/packages/@aws-cdk/aws-cloudfront/package.json b/packages/@aws-cdk/aws-cloudfront/package.json index 27f6835b0f021..a8707a89058a1 100644 --- a/packages/@aws-cdk/aws-cloudfront/package.json +++ b/packages/@aws-cdk/aws-cloudfront/package.json @@ -47,7 +47,8 @@ "compat": "cdk-compat" }, "cdk-build": { - "cloudformation": "AWS::CloudFront" + "cloudformation": "AWS::CloudFront", + "jest": true }, "keywords": [ "aws", @@ -63,12 +64,11 @@ "license": "Apache-2.0", "devDependencies": { "@aws-cdk/assert": "0.0.0", - "@types/nodeunit": "^0.0.31", - "aws-sdk": "^2.710.0", + "aws-sdk": "^2.712.0", "cdk-build-tools": "0.0.0", "cdk-integ-tools": "0.0.0", "cfn2ts": "0.0.0", - "nodeunit": "^0.11.3", + "nodeunit-shim": "0.0.0", "pkglint": "0.0.0" }, "dependencies": { diff --git a/packages/@aws-cdk/aws-cloudfront/test/integ.cloudfront-bucket-logging.expected.json b/packages/@aws-cdk/aws-cloudfront/test/integ.cloudfront-bucket-logging.expected.json index 70780cca41033..36a334898a57f 100644 --- 
a/packages/@aws-cdk/aws-cloudfront/test/integ.cloudfront-bucket-logging.expected.json +++ b/packages/@aws-cdk/aws-cloudfront/test/integ.cloudfront-bucket-logging.expected.json @@ -44,6 +44,8 @@ }, "Origins": [ { + "ConnectionAttempts": 3, + "ConnectionTimeout": 10, "CustomOriginConfig": { "HTTPPort": 80, "HTTPSPort": 443, @@ -114,6 +116,8 @@ }, "Origins": [ { + "ConnectionAttempts": 3, + "ConnectionTimeout": 10, "CustomOriginConfig": { "HTTPPort": 80, "HTTPSPort": 443, diff --git a/packages/@aws-cdk/aws-cloudfront/test/integ.cloudfront-custom-s3.expected.json b/packages/@aws-cdk/aws-cloudfront/test/integ.cloudfront-custom-s3.expected.json index 954b13e1edc67..7699faf6df792 100644 --- a/packages/@aws-cdk/aws-cloudfront/test/integ.cloudfront-custom-s3.expected.json +++ b/packages/@aws-cdk/aws-cloudfront/test/integ.cloudfront-custom-s3.expected.json @@ -72,6 +72,8 @@ "IPV6Enabled": true, "Origins": [ { + "ConnectionAttempts": 3, + "ConnectionTimeout": 10, "CustomOriginConfig": { "HTTPPort": 80, "HTTPSPort": 443, diff --git a/packages/@aws-cdk/aws-cloudfront/test/integ.cloudfront-custom.expected.json b/packages/@aws-cdk/aws-cloudfront/test/integ.cloudfront-custom.expected.json index 92071bb9ef237..5943d0d822d3c 100644 --- a/packages/@aws-cdk/aws-cloudfront/test/integ.cloudfront-custom.expected.json +++ b/packages/@aws-cdk/aws-cloudfront/test/integ.cloudfront-custom.expected.json @@ -29,6 +29,8 @@ "IPV6Enabled": true, "Origins": [ { + "ConnectionAttempts": 3, + "ConnectionTimeout": 10, "CustomOriginConfig": { "HTTPPort": 80, "HTTPSPort": 443, diff --git a/packages/@aws-cdk/aws-cloudfront/test/integ.cloudfront-empty-root.expected.json b/packages/@aws-cdk/aws-cloudfront/test/integ.cloudfront-empty-root.expected.json index a5298a4968004..9b20011d6a9a1 100644 --- a/packages/@aws-cdk/aws-cloudfront/test/integ.cloudfront-empty-root.expected.json +++ b/packages/@aws-cdk/aws-cloudfront/test/integ.cloudfront-empty-root.expected.json @@ -29,6 +29,8 @@ "IPV6Enabled": true, 
"Origins": [ { + "ConnectionAttempts": 3, + "ConnectionTimeout": 10, "CustomOriginConfig": { "HTTPPort": 80, "HTTPSPort": 443, diff --git a/packages/@aws-cdk/aws-cloudfront/test/integ.cloudfront-geo-restrictions.expected.json b/packages/@aws-cdk/aws-cloudfront/test/integ.cloudfront-geo-restrictions.expected.json index 25ae7addfd317..9307879bca924 100644 --- a/packages/@aws-cdk/aws-cloudfront/test/integ.cloudfront-geo-restrictions.expected.json +++ b/packages/@aws-cdk/aws-cloudfront/test/integ.cloudfront-geo-restrictions.expected.json @@ -34,6 +34,8 @@ "IPV6Enabled": true, "Origins": [ { + "ConnectionAttempts": 3, + "ConnectionTimeout": 10, "DomainName": { "Fn::GetAtt": [ "Bucket83908E77", diff --git a/packages/@aws-cdk/aws-cloudfront/test/integ.cloudfront-ipv6-disabled.expected.json b/packages/@aws-cdk/aws-cloudfront/test/integ.cloudfront-ipv6-disabled.expected.json index 5ffa5da872430..30bc881df48f3 100644 --- a/packages/@aws-cdk/aws-cloudfront/test/integ.cloudfront-ipv6-disabled.expected.json +++ b/packages/@aws-cdk/aws-cloudfront/test/integ.cloudfront-ipv6-disabled.expected.json @@ -34,6 +34,8 @@ "IPV6Enabled": false, "Origins": [ { + "ConnectionAttempts": 3, + "ConnectionTimeout": 10, "DomainName": { "Fn::GetAtt": [ "Bucket83908E77", diff --git a/packages/@aws-cdk/aws-cloudfront/test/integ.cloudfront-lambda-association.expected.json b/packages/@aws-cdk/aws-cloudfront/test/integ.cloudfront-lambda-association.expected.json index cce226bd3a090..eb45a5dd3192d 100644 --- a/packages/@aws-cdk/aws-cloudfront/test/integ.cloudfront-lambda-association.expected.json +++ b/packages/@aws-cdk/aws-cloudfront/test/integ.cloudfront-lambda-association.expected.json @@ -107,6 +107,8 @@ "IPV6Enabled": true, "Origins": [ { + "ConnectionAttempts": 3, + "ConnectionTimeout": 10, "DomainName": { "Fn::GetAtt": [ "Bucket83908E77", diff --git a/packages/@aws-cdk/aws-cloudfront/test/integ.cloudfront-s3.expected.json b/packages/@aws-cdk/aws-cloudfront/test/integ.cloudfront-s3.expected.json 
index b1ddafe584cf3..8f6357e72272e 100644 --- a/packages/@aws-cdk/aws-cloudfront/test/integ.cloudfront-s3.expected.json +++ b/packages/@aws-cdk/aws-cloudfront/test/integ.cloudfront-s3.expected.json @@ -102,6 +102,8 @@ "IPV6Enabled": true, "Origins": [ { + "ConnectionAttempts": 3, + "ConnectionTimeout": 10, "DomainName": { "Fn::GetAtt": [ "Bucket83908E77", diff --git a/packages/@aws-cdk/aws-cloudfront/test/integ.cloudfront-security-policy.expected.json b/packages/@aws-cdk/aws-cloudfront/test/integ.cloudfront-security-policy.expected.json index 861887bd54f48..068f2e3492856 100644 --- a/packages/@aws-cdk/aws-cloudfront/test/integ.cloudfront-security-policy.expected.json +++ b/packages/@aws-cdk/aws-cloudfront/test/integ.cloudfront-security-policy.expected.json @@ -32,6 +32,8 @@ "IPV6Enabled": true, "Origins": [ { + "ConnectionAttempts": 3, + "ConnectionTimeout": 10, "CustomOriginConfig": { "HTTPPort": 80, "HTTPSPort": 443, diff --git a/packages/@aws-cdk/aws-cloudfront/test/integ.cloudfront.expected.json b/packages/@aws-cdk/aws-cloudfront/test/integ.cloudfront.expected.json index a5f3615733d82..acadf8baf6866 100644 --- a/packages/@aws-cdk/aws-cloudfront/test/integ.cloudfront.expected.json +++ b/packages/@aws-cdk/aws-cloudfront/test/integ.cloudfront.expected.json @@ -34,6 +34,8 @@ "IPV6Enabled": true, "Origins": [ { + "ConnectionAttempts": 3, + "ConnectionTimeout": 10, "DomainName": { "Fn::GetAtt": [ "Bucket83908E77", diff --git a/packages/@aws-cdk/aws-cloudfront/test/test.oai.ts b/packages/@aws-cdk/aws-cloudfront/test/oai.test.ts similarity index 95% rename from packages/@aws-cdk/aws-cloudfront/test/test.oai.ts rename to packages/@aws-cdk/aws-cloudfront/test/oai.test.ts index bd067aa902f03..36616f12471ba 100644 --- a/packages/@aws-cdk/aws-cloudfront/test/test.oai.ts +++ b/packages/@aws-cdk/aws-cloudfront/test/oai.test.ts @@ -1,11 +1,11 @@ import { expect } from '@aws-cdk/assert'; import * as cdk from '@aws-cdk/core'; -import { Test } from 'nodeunit'; +import { 
nodeunitShim, Test } from 'nodeunit-shim'; import { OriginAccessIdentity } from '../lib'; // tslint:disable:object-literal-key-quotes -export = { +nodeunitShim({ 'Origin Access Identity with automatic comment'(test: Test) { const stack = new cdk.Stack(); @@ -66,4 +66,4 @@ export = { test.done(); }, -}; +}); diff --git a/packages/@aws-cdk/aws-cloudfront/test/test.basic.ts b/packages/@aws-cdk/aws-cloudfront/test/web_distribution.test.ts similarity index 82% rename from packages/@aws-cdk/aws-cloudfront/test/test.basic.ts rename to packages/@aws-cdk/aws-cloudfront/test/web_distribution.test.ts index 39366da64117d..245f85a176d4b 100644 --- a/packages/@aws-cdk/aws-cloudfront/test/test.basic.ts +++ b/packages/@aws-cdk/aws-cloudfront/test/web_distribution.test.ts @@ -3,7 +3,7 @@ import * as certificatemanager from '@aws-cdk/aws-certificatemanager'; import * as lambda from '@aws-cdk/aws-lambda'; import * as s3 from '@aws-cdk/aws-s3'; import * as cdk from '@aws-cdk/core'; -import { Test } from 'nodeunit'; +import { nodeunitShim, Test } from 'nodeunit-shim'; import { CfnDistribution, CloudFrontWebDistribution, @@ -17,7 +17,7 @@ import { // tslint:disable:object-literal-key-quotes -export = { +nodeunitShim({ 'distribution with custom origin adds custom origin'(test: Test) { const stack = new cdk.Stack(); @@ -82,6 +82,8 @@ export = { 'TLSv1.2', ], }, + 'ConnectionAttempts': 3, + 'ConnectionTimeout': 10, 'DomainName': 'myorigin.com', 'Id': 'origin1', 'OriginCustomHeaders': [ @@ -139,6 +141,8 @@ export = { 'DefaultRootObject': 'index.html', 'Origins': [ { + 'ConnectionAttempts': 3, + 'ConnectionTimeout': 10, 'DomainName': { 'Fn::GetAtt': [ 'Bucket83908E77', @@ -215,6 +219,8 @@ export = { 'DefaultRootObject': 'index.html', 'Origins': [ { + 'ConnectionAttempts': 3, + 'ConnectionTimeout': 10, 'DomainName': { 'Fn::GetAtt': [ 'Bucket83908E77', @@ -294,6 +300,8 @@ export = { 'DefaultRootObject': 'index.html', 'Origins': [ { + 'ConnectionAttempts': 3, + 'ConnectionTimeout': 10, 
'DomainName': { 'Fn::GetAtt': [ 'Bucket83908E77', @@ -370,6 +378,8 @@ export = { 'DefaultRootObject': 'index.html', 'Origins': [ { + 'ConnectionAttempts': 3, + 'ConnectionTimeout': 10, 'DomainName': { 'Fn::GetAtt': [ 'Bucket83908E77', @@ -905,6 +915,8 @@ export = { 'DefaultRootObject': 'index.html', 'Origins': [ { + 'ConnectionAttempts': 3, + 'ConnectionTimeout': 10, 'DomainName': { 'Fn::GetAtt': [ 'Bucket83908E77', @@ -979,6 +991,8 @@ export = { 'DefaultRootObject': 'index.html', 'Origins': [ { + 'ConnectionAttempts': 3, + 'ConnectionTimeout': 10, 'DomainName': { 'Fn::GetAtt': [ 'Bucket83908E77', @@ -1032,25 +1046,175 @@ export = { 'throws if locations is empty array'(test: Test) { test.throws(() => { GeoRestriction.whitelist(); - }, 'Should provide at least 1 location'); + }, /Should provide at least 1 location/); test.throws(() => { GeoRestriction.blacklist(); - }, 'Should provide at least 1 location'); + }, /Should provide at least 1 location/); test.done(); }, 'throws if locations format is wrong'(test: Test) { test.throws(() => { GeoRestriction.whitelist('us'); - }, 'Invalid location format for location: us, location should be two-letter and uppercase country ISO 3166-1-alpha-2 code'); + }, /Invalid location format for location: us, location should be two-letter and uppercase country ISO 3166-1-alpha-2 code/); test.throws(() => { GeoRestriction.blacklist('us'); - }, 'Invalid location format for location: us, location should be two-letter and uppercase country ISO 3166-1-alpha-2 code'); + }, /Invalid location format for location: us, location should be two-letter and uppercase country ISO 3166-1-alpha-2 code/); test.done(); }, }, }, -}; + + 'Connection behaviors between CloudFront and your origin': { + 'success': { + 'connectionAttempts = 1'(test: Test) { + const stack = new cdk.Stack(); + test.doesNotThrow(() => { + new CloudFrontWebDistribution(stack, 'Distribution', { + originConfigs: [{ + behaviors: [{ isDefaultBehavior: true }], + connectionAttempts: 1, + 
customOriginSource: { domainName: 'myorigin.com' }, + }], + }); + }, /connectionAttempts: You can specify 1, 2, or 3 as the number of attempts./); + test.done(); + }, + '3 = connectionAttempts'(test: Test) { + const stack = new cdk.Stack(); + test.doesNotThrow(() => { + new CloudFrontWebDistribution(stack, 'Distribution', { + originConfigs: [{ + behaviors: [{ isDefaultBehavior: true }], + connectionAttempts: 3, + customOriginSource: { domainName: 'myorigin.com' }, + }], + }); + }, /connectionAttempts: You can specify 1, 2, or 3 as the number of attempts./); + test.done(); + }, + 'connectionTimeout = 1'(test: Test) { + const stack = new cdk.Stack(); + test.doesNotThrow(() => { + new CloudFrontWebDistribution(stack, 'Distribution', { + originConfigs: [{ + behaviors: [{ isDefaultBehavior: true }], + connectionTimeout: cdk.Duration.seconds(1), + customOriginSource: { domainName: 'myorigin.com' }, + }], + }); + }, /connectionTimeout: You can specify a number of seconds between 1 and 10 (inclusive)./); + test.done(); + }, + '10 = connectionTimeout'(test: Test) { + const stack = new cdk.Stack(); + test.doesNotThrow(() => { + new CloudFrontWebDistribution(stack, 'Distribution', { + originConfigs: [{ + behaviors: [{ isDefaultBehavior: true }], + connectionTimeout: cdk.Duration.seconds(10), + customOriginSource: { domainName: 'myorigin.com' }, + }], + }); + }, /connectionTimeout: You can specify a number of seconds between 1 and 10 (inclusive)./); + test.done(); + }, + }, + 'errors': { + 'connectionAttempts = 1.1'(test: Test) { + const stack = new cdk.Stack(); + test.throws(() => { + new CloudFrontWebDistribution(stack, 'Distribution', { + originConfigs: [{ + behaviors: [{ isDefaultBehavior: true }], + connectionAttempts: 1.1, + customOriginSource: { domainName: 'myorigin.com' }, + }], + }); + }, /connectionAttempts: You can specify 1, 2, or 3 as the number of attempts./); + test.done(); + }, + 'connectionAttempts = -1'(test: Test) { + const stack = new cdk.Stack(); + 
test.throws(() => { + new CloudFrontWebDistribution(stack, 'Distribution', { + originConfigs: [{ + behaviors: [{ isDefaultBehavior: true }], + connectionAttempts: -1, + customOriginSource: { domainName: 'myorigin.com' }, + }], + }); + }, /connectionAttempts: You can specify 1, 2, or 3 as the number of attempts./); + test.done(); + }, + 'connectionAttempts < 1'(test: Test) { + const stack = new cdk.Stack(); + test.throws(() => { + new CloudFrontWebDistribution(stack, 'Distribution', { + originConfigs: [{ + behaviors: [{ isDefaultBehavior: true }], + connectionAttempts: 0, + customOriginSource: { domainName: 'myorigin.com' }, + }], + }); + }, /connectionAttempts: You can specify 1, 2, or 3 as the number of attempts./); + test.done(); + }, + '3 < connectionAttempts'(test: Test) { + const stack = new cdk.Stack(); + test.throws(() => { + new CloudFrontWebDistribution(stack, 'Distribution', { + originConfigs: [{ + behaviors: [{ isDefaultBehavior: true }], + connectionAttempts: 4, + customOriginSource: { domainName: 'myorigin.com' }, + }], + }); + }, /connectionAttempts: You can specify 1, 2, or 3 as the number of attempts./); + test.done(); + }, + 'connectionTimeout = 1.1'(test: Test) { + const stack = new cdk.Stack(); + test.throws(() => { + new CloudFrontWebDistribution(stack, 'Distribution', { + originConfigs: [{ + behaviors: [{ isDefaultBehavior: true }], + connectionTimeout: cdk.Duration.seconds(1.1), + customOriginSource: { domainName: 'myorigin.com' }, + }], + }); + }, /connectionTimeout: You can specify a number of seconds between 1 and 10 \(inclusive\)./); + test.done(); + }, + 'connectionTimeout < 1'(test: Test) { + const stack = new cdk.Stack(); + test.throws(() => { + new CloudFrontWebDistribution(stack, 'Distribution', { + originConfigs: [{ + behaviors: [{ isDefaultBehavior: true }], + connectionTimeout: cdk.Duration.seconds(0), + customOriginSource: { domainName: 'myorigin.com' }, + }], + }); + }, /connectionTimeout: You can specify a number of seconds 
between 1 and 10 \(inclusive\)./); + test.done(); + }, + '10 < connectionTimeout'(test: Test) { + const stack = new cdk.Stack(); + test.throws(() => { + new CloudFrontWebDistribution(stack, 'Distribution', { + originConfigs: [{ + behaviors: [{ isDefaultBehavior: true }], + connectionTimeout: cdk.Duration.seconds(11), + customOriginSource: { domainName: 'myorigin.com' }, + }], + }); + }, /connectionTimeout: You can specify a number of seconds between 1 and 10 \(inclusive\)./); + test.done(); + }, + }, + }, +}); diff --git a/packages/@aws-cdk/aws-cloudtrail/package.json b/packages/@aws-cdk/aws-cloudtrail/package.json index f024b97b7b532..0929ba1212f6d 100644 --- a/packages/@aws-cdk/aws-cloudtrail/package.json +++ b/packages/@aws-cdk/aws-cloudtrail/package.json @@ -64,7 +64,7 @@ "license": "Apache-2.0", "devDependencies": { "@aws-cdk/assert": "0.0.0", - "aws-sdk": "^2.710.0", + "aws-sdk": "^2.712.0", "cdk-build-tools": "0.0.0", "cdk-integ-tools": "0.0.0", "cfn2ts": "0.0.0", diff --git a/packages/@aws-cdk/aws-codebuild/package.json b/packages/@aws-cdk/aws-codebuild/package.json index 4c67386790f41..ee541d561b01a 100644 --- a/packages/@aws-cdk/aws-codebuild/package.json +++ b/packages/@aws-cdk/aws-codebuild/package.json @@ -70,7 +70,7 @@ "@aws-cdk/aws-sns": "0.0.0", "@aws-cdk/aws-sqs": "0.0.0", "@types/nodeunit": "^0.0.31", - "aws-sdk": "^2.710.0", + "aws-sdk": "^2.712.0", "cdk-build-tools": "0.0.0", "cdk-integ-tools": "0.0.0", "cfn2ts": "0.0.0", diff --git a/packages/@aws-cdk/aws-codecommit/package.json b/packages/@aws-cdk/aws-codecommit/package.json index ecc3e43d5f6a6..b404a16c2e1fe 100644 --- a/packages/@aws-cdk/aws-codecommit/package.json +++ b/packages/@aws-cdk/aws-codecommit/package.json @@ -70,7 +70,7 @@ "@aws-cdk/assert": "0.0.0", "@aws-cdk/aws-sns": "0.0.0", "@types/nodeunit": "^0.0.31", - "aws-sdk": "^2.710.0", + "aws-sdk": "^2.712.0", "cdk-build-tools": "0.0.0", "cdk-integ-tools": "0.0.0", "cfn2ts": "0.0.0", diff --git 
a/packages/@aws-cdk/aws-codepipeline-actions/README.md b/packages/@aws-cdk/aws-codepipeline-actions/README.md index 327a7fd289809..0c4a3908761eb 100644 --- a/packages/@aws-cdk/aws-codepipeline-actions/README.md +++ b/packages/@aws-cdk/aws-codepipeline-actions/README.md @@ -14,9 +14,9 @@ import * as codepipeline from '@aws-cdk/aws-codepipeline'; import * as codepipeline_actions from '@aws-cdk/aws-codepipeline-actions'; ``` -### Sources +## Sources -#### AWS CodeCommit +### AWS CodeCommit To use a CodeCommit Repository in a CodePipeline: @@ -62,7 +62,14 @@ new codepipeline_actions.CodeBuildAction({ }); ``` -#### GitHub +### GitHub + +If you want to use a GitHub repository as the source, you must create: + +* A [GitHub Access Token](https://help.github.com/en/github/authenticating-to-github/creating-a-personal-access-token-for-the-command-line) +* A [Secrets Manager PlainText Secret](https://docs.aws.amazon.com/secretsmanager/latest/userguide/manage_create-basic-secret.html) + with the value of the **GitHub Access Token**. Pick whatever name you want + (for example `my-github-token`) and pass it as the argument of `oauthToken`. To use GitHub as the source of a CodePipeline: @@ -104,7 +111,7 @@ new codepipeline_actions.CodeBuildAction({ }); ``` -#### BitBucket +### BitBucket CodePipeline can use a BitBucket Git repository as a source: @@ -135,7 +142,7 @@ const sourceAction = new codepipeline_actions.BitBucketSourceAction({ the above class `BitBucketSourceAction` is experimental - we reserve the right to make breaking changes to it. 
-#### AWS S3 +### AWS S3 To use an S3 Bucket as a source in CodePipeline: @@ -205,7 +212,7 @@ new codepipeline_actions.CodeBuildAction({ }); ``` -#### AWS ECR +### AWS ECR To use an ECR Repository as a source in a Pipeline: @@ -246,9 +253,9 @@ new codepipeline_actions.CodeBuildAction({ }); ``` -### Build & test +## Build & test -#### AWS CodeBuild +### AWS CodeBuild Example of a CodeBuild Project used in a Pipeline, alongside CodeCommit: @@ -301,7 +308,7 @@ const testAction = new codepipeline_actions.CodeBuildAction({ }); ``` -##### Multiple inputs and outputs +#### Multiple inputs and outputs When you want to have multiple inputs and/or outputs for a Project used in a Pipeline, instead of using the `secondarySources` and `secondaryArtifacts` @@ -375,7 +382,7 @@ const project = new codebuild.PipelineProject(this, 'MyProject', { }); ``` -##### Variables +#### Variables The CodeBuild action emits variables. Unlike many other actions, the variables are not static, @@ -399,7 +406,7 @@ const buildAction = new codepipeline_actions.CodeBuildAction({ build: { commands: 'export MY_VAR="some value"', }, - }, + }, }), }), variablesNamespace: 'MyNamespace', // optional - by default, a name will be generated for you @@ -417,7 +424,7 @@ new codepipeline_actions.CodeBuildAction({ }); ``` -#### Jenkins +### Jenkins In order to use Jenkins Actions in the Pipeline, you first need to create a `JenkinsProvider`: @@ -459,9 +466,9 @@ const buildAction = new codepipeline_actions.JenkinsAction({ }); ``` -### Deploy +## Deploy -#### AWS CloudFormation +### AWS CloudFormation This module contains Actions that allows you to deploy to CloudFormation from AWS CodePipeline. @@ -497,7 +504,7 @@ using a CloudFormation CodePipeline Action. 
Example: [Example of deploying a Lambda through CodePipeline](test/integ.lambda-deployed-through-codepipeline.lit.ts) -##### Cross-account actions +#### Cross-account actions If you want to update stacks in a different account, pass the `account` property when creating the action: @@ -534,9 +541,9 @@ new codepipeline_actions.CloudFormationCreateUpdateStackAction({ }); ``` -#### AWS CodeDeploy +### AWS CodeDeploy -##### Server deployments +#### Server deployments To use CodeDeploy for EC2/on-premise deployments in a Pipeline: @@ -589,7 +596,7 @@ where you will define your Pipeline, and deploy the `lambdaStack` using a CloudFormation CodePipeline Action (see above for a complete example). -#### ECS +### ECS CodePipeline can deploy an ECS service. The deploy Action receives one input Artifact which contains the [image definition file]: @@ -616,7 +623,7 @@ const deployStage = pipeline.addStage({ [image definition file]: https://docs.aws.amazon.com/codepipeline/latest/userguide/pipelines-create.html#pipelines-create-image-definitions -#### AWS S3 +### AWS S3 To use an S3 Bucket as a deployment target in CodePipeline: @@ -636,7 +643,7 @@ const deployStage = pipeline.addStage({ }); ``` -#### Alexa Skill +### Alexa Skill You can deploy to Alexa using CodePipeline with the following Action: @@ -687,9 +694,9 @@ new codepipeline_actions.AlexaSkillDeployAction({ }); ``` -### Approve & invoke +## Approve & invoke -#### Manual approval Action +### Manual approval Action This package contains an Action that stops the Pipeline until someone manually clicks the approve button: @@ -712,7 +719,7 @@ but `notifyEmails` were, a new SNS Topic will be created (and accessible through the `notificationTopic` property of the Action). 
-#### AWS Lambda +### AWS Lambda This module contains an Action that allows you to invoke a Lambda function in a Pipeline: diff --git a/packages/@aws-cdk/aws-codepipeline-actions/lib/s3/deploy-action.ts b/packages/@aws-cdk/aws-codepipeline-actions/lib/s3/deploy-action.ts index 7168719bde5f6..0258c5077a2ae 100644 --- a/packages/@aws-cdk/aws-codepipeline-actions/lib/s3/deploy-action.ts +++ b/packages/@aws-cdk/aws-codepipeline-actions/lib/s3/deploy-action.ts @@ -30,9 +30,9 @@ export class CacheControl { /** The 'proxy-revalidate' cache control directive. */ public static proxyRevalidate() { return new CacheControl('proxy-revalidate'); } /** The 'max-age' cache control directive. */ - public static maxAge(t: Duration) { return new CacheControl(`max-age: ${t.toSeconds()}`); } + public static maxAge(t: Duration) { return new CacheControl(`max-age=${t.toSeconds()}`); } /** The 's-max-age' cache control directive. */ - public static sMaxAge(t: Duration) { return new CacheControl(`s-maxage: ${t.toSeconds()}`); } + public static sMaxAge(t: Duration) { return new CacheControl(`s-maxage=${t.toSeconds()}`); } /** * Allows you to create an arbitrary cache control directive, * in case our support is missing a method for a particular directive. 
diff --git a/packages/@aws-cdk/aws-codepipeline-actions/package.json b/packages/@aws-cdk/aws-codepipeline-actions/package.json index b34c52fc4c7ec..4767b49a361b0 100644 --- a/packages/@aws-cdk/aws-codepipeline-actions/package.json +++ b/packages/@aws-cdk/aws-codepipeline-actions/package.json @@ -68,7 +68,7 @@ "@types/nodeunit": "^0.0.31", "cdk-build-tools": "0.0.0", "cdk-integ-tools": "0.0.0", - "lodash": "^4.17.15", + "lodash": "^4.17.19", "nodeunit": "^0.11.3", "pkglint": "0.0.0" }, diff --git a/packages/@aws-cdk/aws-codepipeline-actions/test/integ.pipeline-s3-deploy.expected.json b/packages/@aws-cdk/aws-codepipeline-actions/test/integ.pipeline-s3-deploy.expected.json index 366c152690322..63f2734f64275 100644 --- a/packages/@aws-cdk/aws-codepipeline-actions/test/integ.pipeline-s3-deploy.expected.json +++ b/packages/@aws-cdk/aws-codepipeline-actions/test/integ.pipeline-s3-deploy.expected.json @@ -319,7 +319,7 @@ }, "Extract": "true", "CannedACL": "public-read", - "CacheControl": "public, max-age: 43200" + "CacheControl": "public, max-age=43200" }, "InputArtifacts": [ { diff --git a/packages/@aws-cdk/aws-codepipeline-actions/test/s3/test.s3-deploy-action.ts b/packages/@aws-cdk/aws-codepipeline-actions/test/s3/test.s3-deploy-action.ts index dec66faad9ed6..37ad397848a51 100644 --- a/packages/@aws-cdk/aws-codepipeline-actions/test/s3/test.s3-deploy-action.ts +++ b/packages/@aws-cdk/aws-codepipeline-actions/test/s3/test.s3-deploy-action.ts @@ -108,6 +108,7 @@ export = { cacheControl: [ cpactions.CacheControl.setPublic(), cpactions.CacheControl.maxAge(Duration.hours(12)), + cpactions.CacheControl.sMaxAge(Duration.hours(12)), ], }); @@ -118,7 +119,7 @@ export = { 'Actions': [ { 'Configuration': { - 'CacheControl': 'public, max-age: 43200', + 'CacheControl': 'public, max-age=43200, s-maxage=43200', }, }, ], diff --git a/packages/@aws-cdk/aws-dynamodb/package.json b/packages/@aws-cdk/aws-dynamodb/package.json index 14ed1894e0ad4..a0924511d32d8 100644 --- 
a/packages/@aws-cdk/aws-dynamodb/package.json +++ b/packages/@aws-cdk/aws-dynamodb/package.json @@ -65,7 +65,7 @@ "devDependencies": { "@aws-cdk/assert": "0.0.0", "@types/jest": "^26.0.3", - "aws-sdk": "^2.710.0", + "aws-sdk": "^2.712.0", "aws-sdk-mock": "^5.1.0", "cdk-build-tools": "0.0.0", "cdk-integ-tools": "0.0.0", diff --git a/packages/@aws-cdk/aws-ec2/lib/vpc-endpoint.ts b/packages/@aws-cdk/aws-ec2/lib/vpc-endpoint.ts index a49ff09fe6634..3767f348ce81a 100644 --- a/packages/@aws-cdk/aws-ec2/lib/vpc-endpoint.ts +++ b/packages/@aws-cdk/aws-ec2/lib/vpc-endpoint.ts @@ -1,6 +1,6 @@ import * as iam from '@aws-cdk/aws-iam'; import * as cxschema from '@aws-cdk/cloud-assembly-schema'; -import { Aws, Construct, ContextProvider, IResource, Lazy, Resource, Token } from '@aws-cdk/core'; +import { Aws, Construct, ContextProvider, IResource, Lazy, Resource, Stack, Token } from '@aws-cdk/core'; import { Connections, IConnectable } from './connections'; import { CfnVPCEndpoint } from './ec2.generated'; import { Peer } from './peer'; @@ -315,7 +315,10 @@ export class InterfaceVpcEndpointAwsService implements IInterfaceVpcEndpointServ public readonly privateDnsDefault?: boolean = true; constructor(name: string, prefix?: string, port?: number) { - this.name = `${prefix || 'com.amazonaws'}.${Aws.REGION}.${name}`; + const region = Lazy.stringValue({ + produce: (context) => Stack.of(context.scope).region, + }); + this.name = `${prefix || 'com.amazonaws'}.${region}.${name}`; this.port = port || 443; } } @@ -471,24 +474,8 @@ export class InterfaceVpcEndpoint extends VpcEndpoint implements IInterfaceVpcEn this.connections.allowDefaultPortFrom(Peer.ipv4(props.vpc.vpcCidrBlock)); } - const lookupSupportedAzs = props.lookupSupportedAzs ?? 
false; - const subnetSelection = props.vpc.selectSubnets({ ...props.subnets, onePerAz: true }); - let subnets; - - // If we don't have an account/region, we will not be able to do filtering on AZs since - // they will be undefined - // Otherwise, we filter by AZ - const agnostic = (Token.isUnresolved(this.stack.account) || Token.isUnresolved(this.stack.region)); - - if (agnostic && lookupSupportedAzs) { - throw new Error('Cannot look up VPC endpoint availability zones if account/region are not specified'); - } else if (!agnostic && lookupSupportedAzs) { - const availableAZs = this.availableAvailabilityZones(props.service.name); - subnets = subnetSelection.subnets.filter(s => availableAZs.includes(s.availabilityZone)); - } else { - subnets = subnetSelection.subnets; - } - const subnetIds = subnets.map(s => s.subnetId); + // Determine which subnets to place the endpoint in + const subnetIds = this.endpointSubnets(props); const endpoint = new CfnVPCEndpoint(this, 'Resource', { privateDnsEnabled: props.privateDnsEnabled ?? props.service.privateDnsDefault ?? true, @@ -506,6 +493,61 @@ export class InterfaceVpcEndpoint extends VpcEndpoint implements IInterfaceVpcEn this.vpcEndpointNetworkInterfaceIds = endpoint.attrNetworkInterfaceIds; } + /** + * Determine which subnets to place the endpoint in. This is in its own function + * because there's a lot of code. + */ + private endpointSubnets(props: InterfaceVpcEndpointProps) { + const lookupSupportedAzs = props.lookupSupportedAzs ?? 
false; + const subnetSelection = props.vpc.selectSubnets({ ...props.subnets, onePerAz: true }); + + // If we don't have an account/region, we will not be able to do filtering on AZs since + // they will be undefined + const agnosticAcct = Token.isUnresolved(this.stack.account); + const agnosticRegion = Token.isUnresolved(this.stack.region); + + // Some service names, such as AWS service name references, use Tokens to automatically + // fill in the region + // If it is an InterfaceVpcEndpointAwsService, then the reference will be resolvable since + // only references the region + const isAwsService = Token.isUnresolved(props.service.name) && props.service instanceof InterfaceVpcEndpointAwsService; + + // Determine what name we pass to the context provider, either the verbatim name + // or a resolved version if it is an AWS service reference + let lookupServiceName = props.service.name; + if (isAwsService && !agnosticRegion) { + lookupServiceName = Stack.of(this).resolve(props.service.name); + } else { + // It's an agnostic service and we don't know how to resolve it. 
+ // This is ok if the stack is region agnostic and we're not looking up + // AZs + lookupServiceName = props.service.name; + } + + // Check if lookup is impossible and throw an appropriate error + // Context provider cannot make an AWS call without an account/region + if ((agnosticAcct || agnosticRegion) && lookupSupportedAzs) { + throw new Error('Cannot look up VPC endpoint availability zones if account/region are not specified'); + } + // Context provider doesn't know the name of the service if there is a Token + // in the name + const agnosticService = Token.isUnresolved(lookupServiceName); + if (agnosticService && lookupSupportedAzs) { + throw new Error(`Cannot lookup AZs for a service name with a Token: ${props.service.name}`); + } + + // Here we do the actual lookup for AZs, if told to do so + let subnets; + if (lookupSupportedAzs) { + const availableAZs = this.availableAvailabilityZones(lookupServiceName); + subnets = subnetSelection.subnets.filter(s => availableAZs.includes(s.availabilityZone)); + } else { + subnets = subnetSelection.subnets; + } + const subnetIds = subnets.map(s => s.subnetId); + return subnetIds; + } + private availableAvailabilityZones(serviceName: string): string[] { // Here we check what AZs the endpoint service is available in // If for whatever reason we can't retrieve the AZs, and no context is set, diff --git a/packages/@aws-cdk/aws-ec2/test/vpc-endpoint.test.ts b/packages/@aws-cdk/aws-ec2/test/vpc-endpoint.test.ts index 30c6018c961ac..60d18187027b1 100644 --- a/packages/@aws-cdk/aws-ec2/test/vpc-endpoint.test.ts +++ b/packages/@aws-cdk/aws-ec2/test/vpc-endpoint.test.ts @@ -470,6 +470,49 @@ nodeunitShim({ ], })); + test.done(); + }, + 'test endpoint service context with aws service'(test: Test) { + // GIVEN + const stack = new Stack(undefined, 'TestStack', { env: { account: '123456789012', region: 'us-east-1' } }); + + // Setup context for stack AZs + stack.node.setContext( + ContextProvider.getKey(stack, { + provider: 
cxschema.ContextProvider.AVAILABILITY_ZONE_PROVIDER, + }).key, + ['us-east-1a', 'us-east-1b', 'us-east-1c']); + // Setup context for endpoint service AZs + stack.node.setContext( + ContextProvider.getKey(stack, { + provider: cxschema.ContextProvider.ENDPOINT_SERVICE_AVAILABILITY_ZONE_PROVIDER, + props: { + serviceName: 'com.amazonaws.us-east-1.execute-api', + }, + }).key, + ['us-east-1a', 'us-east-1c']); + + const vpc = new Vpc(stack, 'VPC'); + + // WHEN + vpc.addInterfaceEndpoint('API Gateway', { + service: InterfaceVpcEndpointAwsService.APIGATEWAY, + lookupSupportedAzs: true, + }); + + // THEN + expect(stack).to(haveResource('AWS::EC2::VPCEndpoint', { + ServiceName: 'com.amazonaws.us-east-1.execute-api', + SubnetIds: [ + { + Ref: 'VPCPrivateSubnet1Subnet8BCA10E0', + }, + { + Ref: 'VPCPrivateSubnet3Subnet3EDCD457', + }, + ], + })); + test.done(); }, }, diff --git a/packages/@aws-cdk/aws-ecs/README.md b/packages/@aws-cdk/aws-ecs/README.md index 702c4617bace2..75c4ac3698e14 100644 --- a/packages/@aws-cdk/aws-ecs/README.md +++ b/packages/@aws-cdk/aws-ecs/README.md @@ -474,7 +474,7 @@ const taskDefinition = new ecs.Ec2TaskDefinition(this, 'TaskDef'); taskDefinition.addContainer('TheContainer', { image: ecs.ContainerImage.fromRegistry('example-image'), memoryLimitMiB: 256, - logging: ecs.LogDrivers.awslogs({ streamPrefix: 'EventDemo' }) + logging: ecs.LogDrivers.awsLogs({ streamPrefix: 'EventDemo' }) }); ``` diff --git a/packages/@aws-cdk/aws-eks/package.json b/packages/@aws-cdk/aws-eks/package.json index af3f49fd3627c..f727e639aadd4 100644 --- a/packages/@aws-cdk/aws-eks/package.json +++ b/packages/@aws-cdk/aws-eks/package.json @@ -65,7 +65,7 @@ "@aws-cdk/assert": "0.0.0", "@types/nodeunit": "^0.0.31", "@types/yaml": "1.2.0", - "aws-sdk": "^2.710.0", + "aws-sdk": "^2.712.0", "cdk-build-tools": "0.0.0", "cdk-integ-tools": "0.0.0", "cfn2ts": "0.0.0", diff --git a/packages/@aws-cdk/aws-events-targets/package.json b/packages/@aws-cdk/aws-events-targets/package.json index 
d22709f9f8f16..ddfb28e66d9b8 100644 --- a/packages/@aws-cdk/aws-events-targets/package.json +++ b/packages/@aws-cdk/aws-events-targets/package.json @@ -68,7 +68,7 @@ "devDependencies": { "@aws-cdk/assert": "0.0.0", "@aws-cdk/aws-codecommit": "0.0.0", - "aws-sdk": "^2.710.0", + "aws-sdk": "^2.712.0", "aws-sdk-mock": "^5.1.0", "cdk-build-tools": "0.0.0", "cdk-integ-tools": "0.0.0", diff --git a/packages/@aws-cdk/aws-iam/lib/role.ts b/packages/@aws-cdk/aws-iam/lib/role.ts index e62dcd45af0be..8b62425be50c6 100644 --- a/packages/@aws-cdk/aws-iam/lib/role.ts +++ b/packages/@aws-cdk/aws-iam/lib/role.ts @@ -177,10 +177,9 @@ export class Role extends Resource implements IRole { const parsedArn = scopeStack.parseArn(roleArn); const resourceName = parsedArn.resourceName!; // service roles have an ARN like 'arn:aws:iam:::role/service-role/' - // we want to support these as well, so strip out the 'service-role/' prefix if we see it - const roleName = resourceName.startsWith('service-role/') - ? resourceName.slice('service-role/'.length) - : resourceName; + // or 'arn:aws:iam:::role/service-role/servicename.amazonaws.com/service-role/' + // we want to support these as well, so we just use the element after the last slash as role name + const roleName = resourceName.split('/').pop()!; class Import extends Resource implements IRole { public readonly grantPrincipal: IPrincipal = this; diff --git a/packages/@aws-cdk/aws-iam/test/role.from-role-arn.test.ts b/packages/@aws-cdk/aws-iam/test/role.from-role-arn.test.ts index c168194c08766..528cacee14faa 100644 --- a/packages/@aws-cdk/aws-iam/test/role.from-role-arn.test.ts +++ b/packages/@aws-cdk/aws-iam/test/role.from-role-arn.test.ts @@ -480,20 +480,45 @@ describe('IAM Role.fromRoleArn', () => { describe('imported with the ARN of a service role', () => { beforeEach(() => { roleStack = new Stack(); - importedRole = Role.fromRoleArn(roleStack, 'Role', - `arn:aws:iam::${roleAccount}:role/service-role/codebuild-role`); }); - it("correctly 
strips the 'service-role' prefix from the role name", () => { - new Policy(roleStack, 'Policy', { - statements: [somePolicyStatement()], - roles: [importedRole], + describe('without a service principal in the role name', () => { + beforeEach(() => { + importedRole = Role.fromRoleArn(roleStack, 'Role', + `arn:aws:iam::${roleAccount}:role/service-role/codebuild-role`); + }); + + it("correctly strips the 'service-role' prefix from the role name", () => { + new Policy(roleStack, 'Policy', { + statements: [somePolicyStatement()], + roles: [importedRole], + }); + + expect(roleStack).toHaveResourceLike('AWS::IAM::Policy', { + 'Roles': [ + 'codebuild-role', + ], + }); }); + }); - expect(roleStack).toHaveResourceLike('AWS::IAM::Policy', { - 'Roles': [ - 'codebuild-role', - ], + describe('with a service principal in the role name', () => { + beforeEach(() => { + importedRole = Role.fromRoleArn(roleStack, 'Role', + `arn:aws:iam::${roleAccount}:role/aws-service-role/anyservice.amazonaws.com/codebuild-role`); + }); + + it("correctly strips both the 'aws-service-role' prefix and the service principal from the role name", () => { + new Policy(roleStack, 'Policy', { + statements: [somePolicyStatement()], + roles: [importedRole], + }); + + expect(roleStack).toHaveResourceLike('AWS::IAM::Policy', { + 'Roles': [ + 'codebuild-role', + ], + }); }); }); }); diff --git a/packages/@aws-cdk/aws-lambda/README.md b/packages/@aws-cdk/aws-lambda/README.md index 55ada8d6da307..4ca6c84b320a2 100644 --- a/packages/@aws-cdk/aws-lambda/README.md +++ b/packages/@aws-cdk/aws-lambda/README.md @@ -280,7 +280,7 @@ the log retention to never expire even if it was configured with a different val ### FileSystem Access You can configure a function to mount an Amazon Elastic File System (Amazon EFS) to a -directory in your runtime environment with the `filesystem` property. To access Amaozn EFS +directory in your runtime environment with the `filesystem` property. 
To access Amazon EFS from lambda function, the Amazon EFS access point will be required. The following sample allows the lambda function to mount the Amazon EFS access point to `/mnt/msg` in the runtime environment and access the filesystem with the POSIX identity defined in `posixUser`. diff --git a/packages/@aws-cdk/aws-lambda/package.json b/packages/@aws-cdk/aws-lambda/package.json index 115b656e62759..f1eaa51eb1c58 100644 --- a/packages/@aws-cdk/aws-lambda/package.json +++ b/packages/@aws-cdk/aws-lambda/package.json @@ -71,12 +71,12 @@ "@types/lodash": "^4.14.157", "@types/nodeunit": "^0.0.31", "@types/sinon": "^9.0.4", - "aws-sdk": "^2.710.0", + "aws-sdk": "^2.712.0", "aws-sdk-mock": "^5.1.0", "cdk-build-tools": "0.0.0", "cdk-integ-tools": "0.0.0", "cfn2ts": "0.0.0", - "lodash": "^4.17.15", + "lodash": "^4.17.19", "nock": "^13.0.2", "nodeunit": "^0.11.3", "pkglint": "0.0.0", diff --git a/packages/@aws-cdk/aws-route53-targets/test/integ.cloudfront-alias-target.expected.json b/packages/@aws-cdk/aws-route53-targets/test/integ.cloudfront-alias-target.expected.json index 76bd0647bca15..7a91ba087aaab 100644 --- a/packages/@aws-cdk/aws-route53-targets/test/integ.cloudfront-alias-target.expected.json +++ b/packages/@aws-cdk/aws-route53-targets/test/integ.cloudfront-alias-target.expected.json @@ -59,6 +59,8 @@ "IPV6Enabled": true, "Origins": [ { + "ConnectionAttempts": 3, + "ConnectionTimeout": 10, "DomainName": { "Fn::GetAtt": [ "Bucket83908E77", diff --git a/packages/@aws-cdk/aws-route53/package.json b/packages/@aws-cdk/aws-route53/package.json index 40f53d8bbee5d..df1e7ca25d7e1 100644 --- a/packages/@aws-cdk/aws-route53/package.json +++ b/packages/@aws-cdk/aws-route53/package.json @@ -64,7 +64,7 @@ "devDependencies": { "@aws-cdk/assert": "0.0.0", "@types/nodeunit": "^0.0.31", - "aws-sdk": "^2.710.0", + "aws-sdk": "^2.712.0", "cdk-build-tools": "0.0.0", "cdk-integ-tools": "0.0.0", "cfn2ts": "0.0.0", diff --git 
a/packages/@aws-cdk/aws-s3-deployment/test/integ.bucket-deployment-cloudfront.expected.json b/packages/@aws-cdk/aws-s3-deployment/test/integ.bucket-deployment-cloudfront.expected.json index e5e0dfe714f00..7563384f9b9f0 100644 --- a/packages/@aws-cdk/aws-s3-deployment/test/integ.bucket-deployment-cloudfront.expected.json +++ b/packages/@aws-cdk/aws-s3-deployment/test/integ.bucket-deployment-cloudfront.expected.json @@ -34,6 +34,8 @@ "IPV6Enabled": true, "Origins": [ { + "ConnectionAttempts": 3, + "ConnectionTimeout": 10, "DomainName": { "Fn::GetAtt": [ "Destination3E3DC043D", diff --git a/packages/@aws-cdk/aws-sqs/package.json b/packages/@aws-cdk/aws-sqs/package.json index d5b79ede55998..431082da0886c 100644 --- a/packages/@aws-cdk/aws-sqs/package.json +++ b/packages/@aws-cdk/aws-sqs/package.json @@ -65,7 +65,7 @@ "@aws-cdk/assert": "0.0.0", "@aws-cdk/aws-s3": "0.0.0", "@types/nodeunit": "^0.0.31", - "aws-sdk": "^2.710.0", + "aws-sdk": "^2.712.0", "cdk-build-tools": "0.0.0", "cdk-integ-tools": "0.0.0", "cfn2ts": "0.0.0", diff --git a/packages/@aws-cdk/aws-stepfunctions/README.md b/packages/@aws-cdk/aws-stepfunctions/README.md index 7a74e563b4cff..fc1ee6b294dcd 100644 --- a/packages/@aws-cdk/aws-stepfunctions/README.md +++ b/packages/@aws-cdk/aws-stepfunctions/README.md @@ -137,7 +137,7 @@ will be passed as the state's output. 
```ts // Makes the current JSON state { ..., "subObject": { "hello": "world" } } const pass = new stepfunctions.Pass(this, 'Add Hello World', { - result: { hello: 'world' }, + result: stepfunctions.Result.fromObject({ hello: 'world' }), resultPath: '$.subObject', }); diff --git a/packages/@aws-cdk/cloudformation-diff/package.json b/packages/@aws-cdk/cloudformation-diff/package.json index 3196b202c7f27..42b5069f8a784 100644 --- a/packages/@aws-cdk/cloudformation-diff/package.json +++ b/packages/@aws-cdk/cloudformation-diff/package.json @@ -33,7 +33,7 @@ "@types/string-width": "^4.0.1", "@types/table": "^4.0.7", "cdk-build-tools": "0.0.0", - "fast-check": "^1.25.1", + "fast-check": "^1.26.0", "jest": "^25.5.4", "pkglint": "0.0.0", "ts-jest": "^26.1.1" diff --git a/packages/@aws-cdk/core/package.json b/packages/@aws-cdk/core/package.json index 4b660f4e83c42..b070e10d52b1a 100644 --- a/packages/@aws-cdk/core/package.json +++ b/packages/@aws-cdk/core/package.json @@ -158,8 +158,8 @@ "@types/sinon": "^9.0.4", "cdk-build-tools": "0.0.0", "cfn2ts": "0.0.0", - "fast-check": "^1.25.1", - "lodash": "^4.17.15", + "fast-check": "^1.26.0", + "lodash": "^4.17.19", "nodeunit": "^0.11.3", "pkglint": "0.0.0", "sinon": "^9.0.2", diff --git a/packages/@aws-cdk/custom-resources/package.json b/packages/@aws-cdk/custom-resources/package.json index fb368e9fe72c1..80a2835a68fbb 100644 --- a/packages/@aws-cdk/custom-resources/package.json +++ b/packages/@aws-cdk/custom-resources/package.json @@ -73,7 +73,7 @@ "@types/aws-lambda": "^8.10.39", "@types/fs-extra": "^8.1.0", "@types/sinon": "^9.0.4", - "aws-sdk": "^2.710.0", + "aws-sdk": "^2.712.0", "aws-sdk-mock": "^5.1.0", "cdk-build-tools": "0.0.0", "cdk-integ-tools": "0.0.0", diff --git a/packages/@aws-cdk/pipelines/.eslintrc.js b/packages/@aws-cdk/pipelines/.eslintrc.js new file mode 100644 index 0000000000000..61dd8dd001f63 --- /dev/null +++ b/packages/@aws-cdk/pipelines/.eslintrc.js @@ -0,0 +1,3 @@ +const baseConfig = 
require('cdk-build-tools/config/eslintrc'); +baseConfig.parserOptions.project = __dirname + '/tsconfig.json'; +module.exports = baseConfig; diff --git a/packages/@aws-cdk/pipelines/.gitignore b/packages/@aws-cdk/pipelines/.gitignore new file mode 100644 index 0000000000000..32a10d785e8fb --- /dev/null +++ b/packages/@aws-cdk/pipelines/.gitignore @@ -0,0 +1,16 @@ +*.js +tsconfig.json +*.js.map +*.d.ts +*.generated.ts +dist +lib/generated/resources.ts +.jsii + +.LAST_BUILD +.nyc_output +coverage +nyc.config.js +.LAST_PACKAGE +*.snk +!.eslintrc.js diff --git a/packages/@aws-cdk/pipelines/.npmignore b/packages/@aws-cdk/pipelines/.npmignore new file mode 100644 index 0000000000000..fe4df9a06d9a9 --- /dev/null +++ b/packages/@aws-cdk/pipelines/.npmignore @@ -0,0 +1,24 @@ +# Don't include original .ts files when doing `npm pack` +*.ts +!*.d.ts +coverage +.nyc_output +*.tgz + +dist +.LAST_PACKAGE +.LAST_BUILD +!*.js + +# Include .jsii +!.jsii + +*.snk + +*.tsbuildinfo + +tsconfig.json +.eslintrc.js + +# exclude cdk artifacts +**/cdk.out \ No newline at end of file diff --git a/packages/@aws-cdk/pipelines/LICENSE b/packages/@aws-cdk/pipelines/LICENSE new file mode 100644 index 0000000000000..b71ec1688783a --- /dev/null +++ b/packages/@aws-cdk/pipelines/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/packages/@aws-cdk/pipelines/NOTICE b/packages/@aws-cdk/pipelines/NOTICE new file mode 100644 index 0000000000000..bfccac9a7f69c --- /dev/null +++ b/packages/@aws-cdk/pipelines/NOTICE @@ -0,0 +1,2 @@ +AWS Cloud Development Kit (AWS CDK) +Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. diff --git a/packages/@aws-cdk/pipelines/README.md b/packages/@aws-cdk/pipelines/README.md new file mode 100644 index 0000000000000..6b171a06dea98 --- /dev/null +++ b/packages/@aws-cdk/pipelines/README.md @@ -0,0 +1,591 @@ +# CDK Pipelines + +--- + +![cdk-constructs: Developer Preview](https://img.shields.io/badge/cdk--constructs-developer--preview-informational.svg?style=for-the-badge) + +> The APIs of higher level constructs in this module are in **developer preview** before they become stable. We will only make breaking changes to address unforeseen API issues. Therefore, these APIs are not subject to [Semantic Versioning](https://semver.org/), and breaking changes will be announced in release notes. This means that while you may use them, you may need to update your source code when upgrading to a newer version of this package. + +--- + + + +A construct library for painless Continuous Delivery of CDK applications. 
+ +![Developer Preview](https://img.shields.io/badge/developer--preview-informational.svg?style=for-the-badge) + +> This module is in **developer preview**. We may make breaking changes to address unforeseen API issues. Therefore, these APIs are not subject to [Semantic Versioning](https://semver.org/), and breaking changes will be announced in release notes. This means that while you may use them, you may need to update your source code when upgrading to a newer version of this package. + +## At a glance + +Defining a pipeline for your application is as simple as defining a subclass +of `Stage`, and calling `pipeline.addApplicationStage()` with instances of +that class. Deploying to a different account or region looks exactly the +same, the *CDK Pipelines* library takes care of the details. + +(Note that have to *bootstrap* all environments before the following code +will work, see the section **CDK Environment Bootstrapping** below). + +```ts +import { Construct, Stage } from '@aws-cdk/core'; + +/** + * Your application + * + * May consist of one or more Stacks + */ +class MyApplication extends Stage { + constructor(scope: Construct, id: string, props?: StageProps) { + super(scope, id, props); + + const dbStack = new DatabaseStack(this, 'Database'); + new ComputeStack(this, 'Compute', { + table: dbStack.table, + }); + } +} + +/** + * Stack to hold the pipeline + */ +class MyPipelineStack extends Stack { + constructor(scope: Construct, id: string, props?: StackProps) { + super(scope, id, props); + + const sourceArtifact = new codepipeline.Artifact(); + const cloudAssemblyArtifact = new codepipeline.Artifact(); + + const pipeline = new CdkPipeline(this, 'Pipeline', { + // ...source and build information here (see below) + }); + + // Do this as many times as necessary with any account and region + // Account and region may different from the pipeline's. 
+ pipeline.addApplicationStage(new MyApplication(this, 'Prod', { + env: { + account: '123456789012', + region: 'eu-west-1', + } + })); + } +} +``` + +The pipeline is **self-mutating**, which means that if you add new +application stages in the source code, or new stacks to `MyApplication`, the +pipeline will automatically reconfigure itself to deploy those new stages and +stacks. + +## CDK Versioning + +This library uses prerelease features of the CDK framework, which can be enabled by adding the +following to `cdk.json`: + +``` +{ + ... + "context": { + "@aws-cdk/core:newStyleStackSynthesis": true + } +} +``` + +## Defining the Pipeline (Source and Synth) + +The pipeline is defined by instantiating `CdkPipeline` in a Stack. This defines the +source location for the pipeline as well as the build commands. For example, the following +defines a pipeline whose source is stored in a GitHub repository, and uses NPM +to build. The Pipeline will be provisioned in account `111111111111` and region +`eu-west-1`: + +```ts +class MyPipelineStack extends Stack { + constructor(scope: Construct, id: string, props?: StackProps) { + super(scope, id, props); + + const sourceArtifact = new codepipeline.Artifact(); + const cloudAssemblyArtifact = new codepipeline.Artifact(); + + const pipeline = new CdkPipeline(this, 'Pipeline', { + pipelineName: 'MyAppPipeline', + cloudAssemblyArtifact, + + sourceAction: new codepipeline_actions.GitHubSourceAction({ + actionName: 'GitHub', + output: sourceArtifact, + oauthToken: SecretValue.secretsManager('GITHUB_TOKEN_NAME'), + trigger: codepipeline_actions.GitHubTrigger.POLL, + // Replace these with your actual GitHub project name + owner: 'OWNER', + repo: 'REPO', + }), + + synthAction: SimpleSynthAction.standardNpmSynth({ + sourceArtifact, + cloudAssemblyArtifact, + + // Use this if you need a build step (if you're not using ts-node + // or if you have TypeScript Lambdas that need to be compiled). 
+ buildCommand: 'npm run build', + }), + }); + } +} + +const app = new App(); +new MyPipelineStack(this, 'PipelineStack', { + env: { + account: '111111111111', + region: 'eu-west-1', + } +}); +``` + +## Initial pipeline deployment + +You provision this pipeline by making sure the target environment has been +bootstrapped (see below), and then executing deploying the `PipelineStack` +*once*. Afterwards, the pipeline will keep itself up-to-date. + +> **Important**: be sure to `git commit` and `git push` before deploying the +> Pipeline stack using `cdk deploy`! +> +> The reason is that the pipeline will start deploying and self-mutating +> right away based on the sources in the repository, so the sources it finds +> in there should be the ones you want it to find. + +Run the following commands to get the pipeline going: + +``` +$ git commit -a +$ git push +$ cdk deploy PipelineStack +``` + +Administrative permissions to the account are only necessary up until +this point. We recommend you shed access to these credentials after doing this. + +### Sources + +Any of the regular sources from the [`@aws-cdk/aws-codepipeline-actions`](https://docs.aws.amazon.com/cdk/api/latest/docs/aws-codepipeline-actions-readme.html#github) module can be used. + +### Synths + +You define how to build and synth the project by specifying a `synthAction`. +This can be any CodePipeline action that produces an artifact with a CDK +Cloud Assembly in it (the contents of the `cdk.out` directory created when +`cdk synth` is called). Pass the output artifact of the synth in the +Pipeline's `cloudAssemblyArtifact` property. + +`SimpleSynthAction` is available for synths that can be performed by running a couple +of simple shell commands (install, build, and synth) using AWS CodeBuild. When +using these, the source repository does not need to have a `buildspec.yml`. 
An example +of using `SimpleSynthAction` to run a Maven build followed by a CDK synth: + +```ts +const pipeline = new CdkPipeline(this, 'Pipeline', { + // ... + synthAction: new SimpleSynthAction({ + sourceArtifact, + cloudAssemblyArtifact, + installCommand: 'npm install -g aws-cdk', + buildCommand: 'mvn package', + synthCommand: 'cdk synth', + }) +}); +``` + +Available as factory functions on `SimpleSynthAction` are some common +convention-based synth: + +* `SimpleSynthAction.standardNpmSynth()`: build using NPM conventions. Expects a `package-lock.json`, + a `cdk.json`, and expects the CLI to be a versioned dependency in `package.json`. Does + not perform a build step by default. +* `CdkSynth.standardYarnSynth()`: build using Yarn conventions. Expects a `yarn.lock` + a `cdk.json`, and expects the CLI to be a versioned dependency in `package.json`. Does + not perform a build step by default. + +If you need a custom build/synth step that is not covered by `SimpleSynthAction`, you can +always add a custom CodeBuild project and pass a corresponding `CodeBuildAction` to the +pipeline. + +## Adding Application Stages + +To define an application that can be added to the pipeline integrally, define a subclass +of `Stage`. The `Stage` can contain one or more stack which make up your application. If +there are dependencies between the stacks, the stacks will automatically be added to the +pipeline in the right order. Stacks that don't depend on each other will be deployed in +parallel. You can add a dependency relationship between stacks by calling +`stack1.addDependency(stack2)`. + +Stages take a default `env` argument which the Stacks inside the Stage will fall back to +if no `env` is defined for them. + +An application is added to the pipeline by calling `addApplicationStage()` with instances +of the Stage. 
The same class can be instantiated and added to the pipeline multiple times +to define different stages of your DTAP or multi-region application pipeline: + +```ts +// Testing stage +pipeline.addApplicationStage(new MyApplication(this, 'Testing', { + env: { account: '111111111111', region: 'eu-west-1' } +})); + +// Acceptance stage +pipeline.addApplicationStage(new MyApplication(this, 'Acceptance', { + env: { account: '222222222222', region: 'eu-west-1' } +})); + +// Production stage +pipeline.addApplicationStage(new MyApplication(this, 'Production', { + env: { account: '333333333333', region: 'eu-west-1' } +})); +``` + +### More Control + +Every *Application Stage* added by `addApplicationStage()` will lead to the addition of +an individual *Pipeline Stage*, which is subsequently returned. You can add more +actions to the stage by calling `addAction()` on it. For example: + +```ts +const testingStage = pipeline.addApplicationStage(new MyApplication(this, 'Testing', { + env: { account: '111111111111', region: 'eu-west-1' } +})); + +// Add a action -- in this case, a Manual Approval action +// (for illustration purposes: testingStage.addManualApprovalAction() is a +// convenience shorthand that does the same) +testingStage.addAction(new ManualApprovalAction({ + actionName: 'ManualApproval', + runOrder: testingStage.nextSequentialRunOrder(), +})); +``` + +You can also add more than one *Application Stage* to one *Pipeline Stage*. 
For example: + +```ts +// Create an empty pipeline stage +const testingStage = pipeline.addStage('Testing'); + +// Add two application stages to the same pipeline stage +testingStage.addApplication(new MyApplication1(this, 'MyApp1', { + env: { account: '111111111111', region: 'eu-west-1' } +})); +testingStage.addApplication(new MyApplication2(this, 'MyApp2', { + env: { account: '111111111111', region: 'eu-west-1' } +})); +``` + +## Adding validations to the pipeline + +You can add any type of CodePipeline Action to the pipeline in order to validate +the deployments you are performing. + +The CDK Pipelines construct library comes with a `ShellScriptAction` which uses AWS CodeBuild +to run a set of shell commands (potentially running a test set that comes with your application, +using stack outputs of the deployed stacks). + +In its simplest form, adding validation actions looks like this: + +```ts +const stage = pipeline.addApplicationStage(new MyApplication(/* ... */)); + +stage.addActions(new ShellScriptAction({ + name: 'MyValidation', + commands: ['curl -Ssf https://my.webservice.com/'], + // ... more configuration ... +})); +``` + +### Using CloudFormation Stack Outputs in ShellScriptAction + +Because many CloudFormation deployments result in the generation of resources with unpredictable +names, validations have support for reading back CloudFormation Outputs after a deployment. This +makes it possible to pass (for example) the generated URL of a load balancer to the test set. 
+ +To use Stack Outputs, expose the `CfnOutput` object you're interested in, and +call `pipeline.stackOutput()` on it: + +```ts +class MyLbApplication extends Stage { + public readonly loadBalancerAddress: CfnOutput; + + constructor(scope: Construct, id: string, props?: StageProps) { + super(scope, id, props); + + const lbStack = new LoadBalancerStack(this, 'Stack'); + + // Or create this in `LoadBalancerStack` directly + this.loadBalancerAddress = new CfnOutput(lbStack, 'LbAddress', { + value: `https://${lbStack.loadBalancer.loadBalancerDnsName}/` + }); + } +} + +const lbApp = new MyLbApplication(this, 'MyApp', { + env: { /* ... */ } +}); +const stage = pipeline.addApplicationStage(lbApp); +stage.addActions(new ShellScriptAction({ + // ... + useOutputs: { + // When the test is executed, this will make $URL contain the + // load balancer address. + URL: pipeline.stackOutput(lbApp.loadBalancerAddress), + } +}); +``` + +### Using additional files in Shell Script Actions + +As part of a validation, you probably want to run a test suite that's more +elaborate than what can be expressed in a couple of lines of shell script. +You can bring additional files into the shell script validation by supplying +the `additionalArtifacts` property. + +Here are some typical examples for how you might want to bring in additional +files from several sources: + +* Directoy from the source repository +* Additional compiled artifacts from the synth step + +#### Additional files from the source repository + +Bringing in additional files from the source repository is appropriate if the +files in the source repository are directly usable in the test (for example, +if they are executable shell scripts themselves). Pass the `sourceArtifact`: + +```ts +const sourceArtifact = new codepipeline.Artifact(); + +const pipeline = new CdkPipeline(this, 'Pipeline', { + // ... 
+}); + +const validationAction = new ShellScriptAction({ + name: 'TestUsingSourceArtifact', + additionalArtifacts: [sourceArtifact], + + // 'test.sh' comes from the source repository + commands: ['./test.sh'], +}); +``` + +#### Additional files from the synth step + +Getting the additional files from the synth step is appropriate if your +tests need the compilation step that is done as part of synthesis. + +On the synthesis step, specify `additionalArtifacts` to package +additional subdirectories into artifacts, and use the same artifact +in the `ShellScriptAction`'s `additionalArtifacts`: + +```ts +// If you are using additional output artifacts from the synth step, +// they must be named. +const cloudAssemblyArtifact = new codepipeline.Artifact('CloudAsm'); +const integTestsArtifact = new codepipeline.Artifact('IntegTests'); + +const pipeline = new CdkPipeline(this, 'Pipeline', { + synthAction: SimpleSynthAction.standardNpmSynth({ + sourceArtifact, + cloudAssemblyArtifact, + buildCommand: 'npm run build', + additionalArtifacts: [ + { + directory: 'test', + artifact: integTestsArtifact, + } + ], + }), + // ... +}); + +const validationAction = new ShellScriptAction({ + name: 'TestUsingBuildArtifact', + additionalArtifacts: [integTestsArtifact], + // 'test.js' was produced from 'test/test.ts' during the synth step + commands: ['node ./test.js'], +}); +``` + +## CDK Environment Bootstrapping + +An *environment* is an *(account, region)* pair where you want to deploy a +CDK stack (see +[Environments](https://docs.aws.amazon.com/cdk/latest/guide/environments.html) +in the CDK Developer Guide). In a Continuous Deployment pipeline, there are +at least two environments involved: the environment where the pipeline is +provisioned, and the environment where you want to deploy the application (or +different stages of the application). 
These can be the same, though best +practices recommend you isolate your different application stages from each +other in different AWS accounts or regions. + +Before you can provision the pipeline, you have to *bootstrap* the environment you want +to create it in. If you are deploying your application to different environments, you +also have to bootstrap those and be sure to add a *trust* relationship. + +> This library requires a newer version of the bootstrapping stack which has +> been updated specifically to support cross-account continous delivery. In the future, +> this new bootstrapping stack will become the default, but for now it is still +> opt-in. +> +> The commands below assume you are running `cdk bootstrap` in a directory +> where `cdk.json` contains the `"@aws-cdk/core:newStyleStackSynthesis": true` +> setting in its context, which will switch to the new bootstrapping stack +> automatically. +> +> If run from another directory, be sure to run the bootstrap command with +> the environment variable `CDK_NEW_BOOTSTRAP=1` set. + +To bootstrap an environment for provisioning the pipeline: + +``` +$ env CDK_NEW_BOOTSTRAP=1 npx cdk bootstrap \ + [--profile admin-profile-1] \ + --cloudformation-execution-policies arn:aws:iam::aws:policy/AdministratorAccess \ + aws://111111111111/us-east-1 +``` + +To bootstrap a different environment for deploying CDK applications into using +a pipeline in account `111111111111`: + +``` +$ env CDK_NEW_BOOTSTRAP=1 npx cdk bootstrap \ + [--profile admin-profile-2] \ + --cloudformation-execution-policies arn:aws:iam::aws:policy/AdministratorAccess \ + --trust 11111111111 \ + aws://222222222222/us-east-2 +``` + +These command lines explained: + +* `npx`: means to use the CDK CLI from the current NPM install. If you are using + a global install of the CDK CLI, leave this out. +* `--profile`: should indicate a profile with administrator privileges that has + permissions to provision a pipeline in the indicated account. 
You can leave this + flag out if either the AWS default credentials or the `AWS_*` environment + variables confer these permissions. +* `--cloudformation-execution-policies`: ARN of the managed policy that future CDK + deployments should execute with. You can tailor this to the needs of your organization + and give more constrained permissions than `AdministratorAccess`. +* `--trust`: indicates which other account(s) should have permissions to deploy + CDK applications into this account. In this case we indicate the Pipeline's account, + but you could also use this for developer accounts (don't do that for production + application accounts though!). +* `aws://222222222222/us-east-2`: the account and region we're bootstrapping. + +> **Security tip**: we recommend that you use administrative credentials to an +> account only to bootstrap it and provision the initial pipeline. Otherwise, +> access to administrative credentials should be dropped as soon as possible. + +### Migrating from old bootstrap stack + +The bootstrap stack is a CloudFormation stack in your account named +**CDKToolkit** that provisions a set of resources required for the CDK +to deploy into that environment. + +The "new" bootstrap stack (obtained by running `cdk bootstrap` with +`CDK_NEW_BOOTSTRAP=1`) is slightly more elaborate than the "old" stack. It +contains: + +* An S3 bucket and ECR repository with predictable names, so that we can reference + assets in these storage locations *without* the use of CloudFormation template + parameters. +* A set of roles with permissions to access these asset locations and to execute + CloudFormation, assumeable from whatever accounts you specify under `--trust`. + +It is possible and safe to migrate from the old bootstrap stack to the new +bootstrap stack. This will create a new S3 file asset bucket in your account +and orphan the old bucket. 
You should manually delete the orphaned bucket +after you are sure you have redeployed all CDK applications and there are no +more references to the old asset bucket. + +## Security Tips + +It's important to stay safe while employing Continuous Delivery. The CDK Pipelines +library comes with secure defaults to the best of our ability, but by its +very nature the library cannot take care of everything. + +We therefore expect you to mind the following: + +* Maintain dependency hygiene and vet 3rd-party software you use. Any software you + run on your build machine has the ability to change the infrastructure that gets + deployed. Be careful with the software you depend on. + +* Use dependency locking to prevent accidental upgrades! The default `CdkSynths` that + come with CDK Pipelines will expect `package-lock.json` and `yarn.lock` to + ensure your dependencies are the ones you expect. + +* Credentials to production environments should be short-lived. After + bootstrapping and the initial pipeline provisioning, there is no more need for + developers to have access to any of the account credentials; all further + changes can be deployed through git. Avoid the chances of credentials leaking + by not having them in the first place! + +## Troubleshooting + +Here are some common errors you may encounter while using this library. + +### Pipeline: Internal Failure + +If you see the following error during deployment of your pipeline: + +``` +CREATE_FAILED | AWS::CodePipeline::Pipeline | Pipeline/Pipeline +Internal Failure +``` + +There's something wrong with your GitHub access token. It might be missing, or not have the +right permissions to access the repository you're trying to access. 
+ +### Key: Policy contains a statement with one or more invalid principals + +If you see the following error during deployment of your pipeline: + +``` +CREATE_FAILED | AWS::KMS::Key | Pipeline/Pipeline/ArtifactsBucketEncryptionKey +Policy contains a statement with one or more invalid principals. +``` + +One of the target (account, region) environments has not been bootstrapped +with the new bootstrap stack. Check your target environments and make sure +they are all bootstrapped. + +### is in ROLLBACK_COMPLETE state and can not be updated. + +If you see the following error during execution of your pipeline: + +``` +Stack ... is in ROLLBACK_COMPLETE state and can not be updated. (Service: +AmazonCloudFormation; Status Code: 400; Error Code: ValidationError; Request +ID: ...) +``` + +The stack failed its previous deployment, and is in a non-retryable state. +Go into the CloudFormation console, delete the stack, and retry the deployment. + +## Current Limitations + +Limitations that we are aware of and will address: + +* **No context queries**: context queries are not supported. That means that + Vpc.fromLookup() and other functions like it will not work [#8905](https://github.com/aws/aws-cdk/issues/8905). + +## Known Issues + +There are some usability issues that are caused by underlying technology, and +cannot be remedied by CDK at this point. They are reproduced here for completeness. + +- **Console links to other accounts will not work**: the AWS CodePipeline + console will assume all links are relative to the current account. You will + not be able to use the pipeline console to click through to a CloudFormation + stack in a different account. +- **If a change set failed to apply the pipeline must restarted**: if a change + set failed to apply, it cannot be retried. The pipeline must be restarted from + the top by clicking **Release Change**. 
+- **A stack that failed to create must be deleted manually**: if a stack + failed to create on the first attempt, you must delete it using the + CloudFormation console before starting the pipeline again by clicking + **Release Change**. diff --git a/packages/@aws-cdk/pipelines/lib/actions/deploy-cdk-stack-action.ts b/packages/@aws-cdk/pipelines/lib/actions/deploy-cdk-stack-action.ts new file mode 100644 index 0000000000000..7517a02f35891 --- /dev/null +++ b/packages/@aws-cdk/pipelines/lib/actions/deploy-cdk-stack-action.ts @@ -0,0 +1,362 @@ +import * as cfn from '@aws-cdk/aws-cloudformation'; +import * as codepipeline from '@aws-cdk/aws-codepipeline'; +import * as cpactions from '@aws-cdk/aws-codepipeline-actions'; +import * as events from '@aws-cdk/aws-events'; +import * as iam from '@aws-cdk/aws-iam'; +import { Arn, Construct, Fn, Stack } from '@aws-cdk/core'; +import * as cxapi from '@aws-cdk/cx-api'; +import * as path from 'path'; +import { appOf, assemblyBuilderOf } from '../private/construct-internals'; + +/** + * Customization options for a DeployCdkStackAction + */ +export interface DeployCdkStackActionOptions { + /** + * Base name of the action + * + * @default stackName + */ + readonly baseActionName?: string; + + /** + * The CodePipeline artifact that holds the Cloud Assembly. 
+ */ + readonly cloudAssemblyInput: codepipeline.Artifact; + + /** + * Run order for the Prepare action + * + * @default 1 + */ + readonly prepareRunOrder?: number; + + /** + * Run order for the Execute action + * + * @default - prepareRunOrder + 1 + */ + readonly executeRunOrder?: number; + + /** + * Artifact to write Stack Outputs to + * + * @default - No outputs + */ + readonly output?: codepipeline.Artifact; + + /** + * Filename in output to write Stack outputs to + * + * @default - Required when 'output' is set + */ + readonly outputFileName?: string; + + /** + * Name of the change set to create and deploy + * + * @default 'PipelineChange' + */ + readonly changeSetName?: string; +} + +/** + * Properties for a DeployCdkStackAction + */ +export interface DeployCdkStackActionProps extends DeployCdkStackActionOptions { + /** + * Relative path of template in the input artifact + */ + readonly templatePath: string; + + /** + * Role for the action to assume + * + * This controls the account to deploy into + */ + readonly actionRole: iam.IRole; + + /** + * The name of the stack that should be created/updated + */ + readonly stackName: string; + + /** + * Role to execute CloudFormation under + * + * @default - Execute CloudFormation using the action role + */ + readonly cloudFormationExecutionRole?: iam.IRole; + + /** + * Region to deploy into + * + * @default - Same region as pipeline + */ + readonly region?: string; + + /** + * Artifact ID for the stack deployed here + * + * Used for pipeline order checking. + * + * @default - Order will not be checked + */ + readonly stackArtifactId?: string; + + /** + * Artifact ID for the stacks this stack depends on + * + * Used for pipeline order checking. 
+ * + * @default - No dependencies + */ + readonly dependencyStackArtifactIds?: string[]; +} + +/** + * Options for the 'fromStackArtifact' operation + */ +export interface CdkStackActionFromArtifactOptions extends DeployCdkStackActionOptions { + /** + * The name of the stack that should be created/updated + * + * @default - Same as stack artifact + */ + readonly stackName?: string; +} + +/** + * Action to deploy a CDK Stack + * + * Adds two CodePipeline Actions to the pipeline: one to create a ChangeSet + * and one to execute it. + * + * You do not need to instantiate this action yourself -- it will automatically + * be added by the pipeline when you add stack artifacts or entire stages. + */ +export class DeployCdkStackAction implements codepipeline.IAction { + /** + * Construct a DeployCdkStackAction from a Stack artifact + */ + public static fromStackArtifact(scope: Construct, artifact: cxapi.CloudFormationStackArtifact, options: CdkStackActionFromArtifactOptions) { + if (!artifact.assumeRoleArn) { + // tslint:disable-next-line:max-line-length + throw new Error(`Stack '${artifact.stackName}' does not have deployment role information; use the 'DefaultStackSynthesizer' synthesizer, or set the '@aws-cdk/core:newStyleStackSynthesis' context key.`); + } + + const actionRole = roleFromPlaceholderArn(scope, artifact.assumeRoleArn); + const cloudFormationExecutionRole = roleFromPlaceholderArn(scope, artifact.cloudFormationExecutionRoleArn); + + const artRegion = artifact.environment.region; + const region = artRegion === Stack.of(scope).region || artRegion === cxapi.UNKNOWN_REGION ? undefined : artRegion; + + // We need the path of the template relative to the root Cloud Assembly + // It should be easier to get this, but for now it is what it is. 
+ const appAsmRoot = assemblyBuilderOf(appOf(scope)).outdir; + const fullTemplatePath = path.join(artifact.assembly.directory, artifact.templateFile); + const templatePath = path.relative(appAsmRoot, fullTemplatePath); + + return new DeployCdkStackAction({ + actionRole, + cloudFormationExecutionRole, + templatePath, + region, + stackArtifactId: artifact.id, + dependencyStackArtifactIds: artifact.dependencies.filter(isStackArtifact).map(s => s.id), + stackName: options.stackName ?? artifact.stackName, + ...options, + }); + } + + /** + * The runorder for the prepare action + */ + public readonly prepareRunOrder: number; + + /** + * The runorder for the execute action + */ + public readonly executeRunOrder: number; + + /** + * Name of the deployed stack + */ + public readonly stackName: string; + + /** + * Artifact id of the artifact this action was based on + */ + public readonly stackArtifactId?: string; + + /** + * Artifact ids of the artifact this stack artifact depends on + */ + public readonly dependencyStackArtifactIds: string[]; + + private readonly prepareChangeSetAction: cpactions.CloudFormationCreateReplaceChangeSetAction; + private readonly executeChangeSetAction: cpactions.CloudFormationExecuteChangeSetAction; + + constructor(props: DeployCdkStackActionProps) { + if (props.output && !props.outputFileName) { + throw new Error('If \'output\' is set, \'outputFileName\' is also required'); + } + + this.stackArtifactId = props.stackArtifactId; + this.dependencyStackArtifactIds = props.dependencyStackArtifactIds ?? []; + + this.prepareRunOrder = props.prepareRunOrder ?? 1; + this.executeRunOrder = props.executeRunOrder ?? this.prepareRunOrder + 1; + this.stackName = props.stackName; + const baseActionName = props.baseActionName ?? this.stackName; + const changeSetName = props.changeSetName ?? 
'PipelineChange'; + + this.prepareChangeSetAction = new cpactions.CloudFormationCreateReplaceChangeSetAction({ + actionName: `${baseActionName}.Prepare`, + changeSetName, + runOrder: this.prepareRunOrder, + stackName: this.stackName, + templatePath: props.cloudAssemblyInput.atPath(props.templatePath), + adminPermissions: false, + role: props.actionRole, + deploymentRole: props.cloudFormationExecutionRole, + region: props.region, + capabilities: [cfn.CloudFormationCapabilities.NAMED_IAM, cfn.CloudFormationCapabilities.AUTO_EXPAND], + }); + this.executeChangeSetAction = new cpactions.CloudFormationExecuteChangeSetAction({ + actionName: `${baseActionName}.Deploy`, + changeSetName, + runOrder: this.executeRunOrder, + stackName: this.stackName, + role: props.actionRole, + region: props.region, + outputFileName: props.outputFileName, + output: props.output, + }); + } + + /** + * Exists to implement IAction + */ + public bind(scope: Construct, stage: codepipeline.IStage, options: codepipeline.ActionBindOptions): + codepipeline.ActionConfig { + stage.addAction(this.prepareChangeSetAction); + + return this.executeChangeSetAction.bind(scope, stage, options); + } + + /** + * Exists to implement IAction + */ + public onStateChange(name: string, target?: events.IRuleTarget, options?: events.RuleProps): events.Rule { + return this.executeChangeSetAction.onStateChange(name, target, options); + } + + /** + * Exists to implement IAction + */ + public get actionProperties(): codepipeline.ActionProperties { + return this.executeChangeSetAction.actionProperties; + } +} + +function roleFromPlaceholderArn(scope: Construct, arn: string): iam.IRole; +function roleFromPlaceholderArn(scope: Construct, arn: string | undefined): iam.IRole | undefined; +function roleFromPlaceholderArn(scope: Construct, arn: string | undefined): iam.IRole | undefined { + if (!arn) { return undefined; } + + // Use placeholdered arn as construct ID. 
+ const id = arn; + + scope = hackyRoleScope(scope, arn); + + // https://github.com/aws/aws-cdk/issues/7255 + let existingRole = scope.node.tryFindChild(`ImmutableRole${id}`) as iam.IRole; + if (existingRole) { return existingRole; } + // For when #7255 is fixed. + existingRole = scope.node.tryFindChild(id) as iam.IRole; + if (existingRole) { return existingRole; } + + return iam.Role.fromRoleArn(scope, id, cfnExpressionFromManifestString(arn), { mutable: false }); +} + +/** + * MASSIVE HACK + * + * We have a bug in the CDK where it's only going to consider Roles that are physically in a + * different Stack object from the Pipeline "cross-account", and will add the appropriate + * Bucket/Key policies. + * https://github.com/aws/aws-cdk/pull/8280 will resolve this, but for now we fake it by hacking + * up a Stack object to root the role in! + * + * Fortunatey, we can just 'new up' an unrooted Stack (unit tests do this all the time) and toss it + * away. It will never be synthesized, but all the logic happens to work out! + */ +function hackyRoleScope(scope: Construct, arn: string): Construct { + const parts = Arn.parse(cxapi.EnvironmentPlaceholders.replace(arn, { + accountId: '', // Empty string on purpose, see below + partition: '', + region: '', + })); + return new Stack(undefined, undefined, { + env: { + // Empty string means ARN had a placeholder which means same account as pipeline stack + account: parts.account || Stack.of(scope).account, + // 'region' from an IAM ARN is always an empty string, so no point. + }, + }); +} + +/** + * Return a CloudFormation expression from a manifest string with placeholders + */ +function cfnExpressionFromManifestString(s: string) { + // This implementation relies on the fact that the manifest placeholders are + // '${AWS::Partition}' etc., and so are the same values as those that are + // trivially substituable using a `Fn.sub`. 
+ return Fn.sub(s); +} + +/** + * Options for CdkDeployAction.fromStackArtifact + */ +export interface FromStackArtifactOptions { + /** + * The CodePipeline artifact that holds the Cloud Assembly. + */ + readonly cloudAssemblyInput: codepipeline.Artifact; + + /** + * Run order for the 2 actions that will be created + * + * @default 1 + */ + readonly prepareRunOrder?: number; + + /** + * Run order for the Execute action + * + * @default - prepareRunOrder + 1 + */ + readonly executeRunOrder?: number; + + /** + * Artifact to write Stack Outputs to + * + * @default - No outputs + */ + readonly output?: codepipeline.Artifact; + + /** + * Filename in output to write Stack outputs to + * + * @default - Required when 'output' is set + */ + readonly outputFileName?: string; +} + +function isStackArtifact(a: cxapi.CloudArtifact): a is cxapi.CloudFormationStackArtifact { + // instanceof is too risky, and we're at a too late stage to properly fix. + // return a instanceof cxapi.CloudFormationStackArtifact; + return a.constructor.name === 'CloudFormationStackArtifact'; +} \ No newline at end of file diff --git a/packages/@aws-cdk/pipelines/lib/actions/index.ts b/packages/@aws-cdk/pipelines/lib/actions/index.ts new file mode 100644 index 0000000000000..834ded93472f2 --- /dev/null +++ b/packages/@aws-cdk/pipelines/lib/actions/index.ts @@ -0,0 +1,3 @@ +export * from './deploy-cdk-stack-action'; +export * from './publish-assets-action'; +export * from './update-pipeline-action'; \ No newline at end of file diff --git a/packages/@aws-cdk/pipelines/lib/actions/publish-assets-action.ts b/packages/@aws-cdk/pipelines/lib/actions/publish-assets-action.ts new file mode 100644 index 0000000000000..668d8f831b548 --- /dev/null +++ b/packages/@aws-cdk/pipelines/lib/actions/publish-assets-action.ts @@ -0,0 +1,153 @@ +import * as codebuild from '@aws-cdk/aws-codebuild'; +import * as codepipeline from '@aws-cdk/aws-codepipeline'; +import * as codepipeline_actions from 
'@aws-cdk/aws-codepipeline-actions'; +import * as events from '@aws-cdk/aws-events'; +import * as iam from '@aws-cdk/aws-iam'; +import { Construct, Lazy } from '@aws-cdk/core'; + +/** + * Type of the asset that is being published + */ +export enum AssetType { + /** + * A file + */ + FILE = 'file', + + /** + * A Docker image + */ + DOCKER_IMAGE = 'docker-image', +} + +/** + * Props for a PublishAssetsAction + */ +export interface PublishAssetsActionProps { + /** + * Name of publishing action + */ + readonly actionName: string; + + /** + * The CodePipeline artifact that holds the Cloud Assembly. + */ + readonly cloudAssemblyInput: codepipeline.Artifact; + + /** + * AssetType we're publishing + */ + readonly assetType: AssetType; + + /** + * Version of CDK CLI to 'npm install'. + * + * @default - Latest version + */ + readonly cdkCliVersion?: string; + + /** + * Name of the CodeBuild project + * + * @default - Automatically generated + */ + readonly projectName?: string; +} + +/** + * Action to publish an asset in the pipeline + * + * Creates a CodeBuild project which will use the CDK CLI + * to prepare and publish the asset. + * + * You do not need to instantiate this action -- it will automatically + * be added by the pipeline when you add stacks that use assets. + */ +export class PublishAssetsAction extends Construct implements codepipeline.IAction { + private readonly action: codepipeline.IAction; + private readonly commands = new Array(); + + constructor(scope: Construct, id: string, private readonly props: PublishAssetsActionProps) { + super(scope, id); + + const installSuffix = props.cdkCliVersion ? 
`@${props.cdkCliVersion}` : ''; + + const project = new codebuild.PipelineProject(this, 'Default', { + projectName: this.props.projectName, + buildSpec: codebuild.BuildSpec.fromObject({ + version: '0.2', + phases: { + install: { + commands: `npm install -g cdk-assets${installSuffix}`, + }, + build: { + commands: Lazy.listValue({ produce: () => this.commands }), + }, + }, + }), + // Needed to perform Docker builds + environment: props.assetType === AssetType.DOCKER_IMAGE ? { privileged: true } : undefined, + }); + + const rolePattern = props.assetType === AssetType.DOCKER_IMAGE + ? 'arn:*:iam::*:role/*-image-publishing-role-*' + : 'arn:*:iam::*:role/*-file-publishing-role-*'; + + project.addToRolePolicy(new iam.PolicyStatement({ + actions: ['sts:AssumeRole'], + resources: [rolePattern], + })); + + this.action = new codepipeline_actions.CodeBuildAction({ + actionName: props.actionName, + project, + input: this.props.cloudAssemblyInput, + }); + } + + /** + * Add a single publishing command + * + * Manifest path should be relative to the root Cloud Assembly. 
+ */ + public addPublishCommand(relativeManifestPath: string, assetSelector: string) { + const command = `cdk-assets --path "${relativeManifestPath}" --verbose publish "${assetSelector}"`; + if (!this.commands.includes(command)) { + this.commands.push(command); + } + } + + /** + * Exists to implement IAction + */ + public bind(scope: Construct, stage: codepipeline.IStage, options: codepipeline.ActionBindOptions): + codepipeline.ActionConfig { + return this.action.bind(scope, stage, options); + } + + /** + * Exists to implement IAction + */ + public onStateChange(name: string, target?: events.IRuleTarget, options?: events.RuleProps): events.Rule { + return this.action.onStateChange(name, target, options); + } + + /** + * Exists to implement IAction + */ + public get actionProperties(): codepipeline.ActionProperties { + // FIXME: I have had to make this class a Construct, because: + // + // - It needs access to the Construct tree, because it is going to add a `PipelineProject`. + // - I would have liked to have done that in bind(), however, + // - `actionProperties` (this method) is called BEFORE bind() is called, and by that point I + // don't have the "inner" Action yet to forward the call to. + // + // I've therefore had to construct the inner CodeBuildAction in the constructor, which requires making this + // Action a Construct. + // + // Combined with how non-intuitive it is to make the "StackDeployAction", I feel there is something + // wrong with the Action abstraction here. 
+ return this.action.actionProperties; + } +} diff --git a/packages/@aws-cdk/pipelines/lib/actions/update-pipeline-action.ts b/packages/@aws-cdk/pipelines/lib/actions/update-pipeline-action.ts new file mode 100644 index 0000000000000..e7b19ac860102 --- /dev/null +++ b/packages/@aws-cdk/pipelines/lib/actions/update-pipeline-action.ts @@ -0,0 +1,127 @@ +import * as codebuild from '@aws-cdk/aws-codebuild'; +import * as codepipeline from '@aws-cdk/aws-codepipeline'; +import * as cpactions from '@aws-cdk/aws-codepipeline-actions'; +import * as events from '@aws-cdk/aws-events'; +import * as iam from '@aws-cdk/aws-iam'; +import { Construct } from '@aws-cdk/core'; +import { embeddedAsmPath } from '../private/construct-internals'; + +/** + * Props for the UpdatePipelineAction + */ +export interface UpdatePipelineActionProps { + /** + * The CodePipeline artifact that holds the Cloud Assembly. + */ + readonly cloudAssemblyInput: codepipeline.Artifact; + + /** + * Name of the pipeline stack + */ + readonly pipelineStackName: string; + + /** + * Version of CDK CLI to 'npm install'. + * + * @default - Latest version + */ + readonly cdkCliVersion?: string; + + /** + * Name of the CodeBuild project + * + * @default - Automatically generated + */ + readonly projectName?: string; +} + +/** + * Action to self-mutate the pipeline + * + * Creates a CodeBuild project which will use the CDK CLI + * to deploy the pipeline stack. + * + * You do not need to instantiate this action -- it will automatically + * be added by the pipeline. + */ +export class UpdatePipelineAction extends Construct implements codepipeline.IAction { + private readonly action: codepipeline.IAction; + + constructor(scope: Construct, id: string, props: UpdatePipelineActionProps) { + super(scope, id); + + const installSuffix = props.cdkCliVersion ? 
`@${props.cdkCliVersion}` : ''; + + const selfMutationProject = new codebuild.PipelineProject(this, 'SelfMutation', { + projectName: props.projectName, + buildSpec: codebuild.BuildSpec.fromObject({ + version: '0.2', + phases: { + install: { + commands: `npm install -g aws-cdk${installSuffix}`, + }, + build: { + commands: [ + // Cloud Assembly is in *current* directory. + `cdk -a ${embeddedAsmPath(scope)} deploy ${props.pipelineStackName} --require-approval=never --verbose`, + ], + }, + }, + }), + }); + + // allow the self-mutating project permissions to assume the bootstrap Action role + selfMutationProject.addToRolePolicy(new iam.PolicyStatement({ + actions: ['sts:AssumeRole'], + resources: ['arn:*:iam::*:role/*-deploy-role-*', 'arn:*:iam::*:role/*-publishing-role-*'], + })); + selfMutationProject.addToRolePolicy(new iam.PolicyStatement({ + actions: ['cloudformation:DescribeStacks'], + resources: ['*'], // this is needed to check the status of the bootstrap stack when doing `cdk deploy` + })); + // S3 checks for the presence of the ListBucket permission + selfMutationProject.addToRolePolicy(new iam.PolicyStatement({ + actions: ['s3:ListBucket'], + resources: ['*'], + })); + this.action = new cpactions.CodeBuildAction({ + actionName: 'SelfMutate', + input: props.cloudAssemblyInput, + project: selfMutationProject, + }); + } + + /** + * Exists to implement IAction + */ + public bind(scope: Construct, stage: codepipeline.IStage, options: codepipeline.ActionBindOptions): + codepipeline.ActionConfig { + return this.action.bind(scope, stage, options); + } + + /** + * Exists to implement IAction + */ + public onStateChange(name: string, target?: events.IRuleTarget, options?: events.RuleProps): events.Rule { + return this.action.onStateChange(name, target, options); + } + + /** + * Exists to implement IAction + */ + public get actionProperties(): codepipeline.ActionProperties { + // FIXME: I have had to make this class a Construct, because: + // + // - It needs access to 
the Construct tree, because it is going to add a `PipelineProject`. + // - I would have liked to have done that in bind(), however, + // - `actionProperties` (this method) is called BEFORE bind() is called, and by that point I + // don't have the "inner" Action yet to forward the call to. + // + // I've therefore had to construct the inner CodeBuildAction in the constructor, which requires making this + // Action a Construct. + // + // Combined with how non-intuitive it is to make the "StackDeployAction", I feel there is something + // wrong with the Action abstraction here. + return this.action.actionProperties; + } +} diff --git a/packages/@aws-cdk/pipelines/lib/index.ts b/packages/@aws-cdk/pipelines/lib/index.ts new file mode 100644 index 0000000000000..dbe8a73291c23 --- /dev/null +++ b/packages/@aws-cdk/pipelines/lib/index.ts @@ -0,0 +1,5 @@ +export * from './pipeline'; +export * from './stage'; +export * from './synths'; +export * from './actions'; +export * from './validation'; \ No newline at end of file diff --git a/packages/@aws-cdk/pipelines/lib/pipeline.ts b/packages/@aws-cdk/pipelines/lib/pipeline.ts new file mode 100644 index 0000000000000..75fb655560c6b --- /dev/null +++ b/packages/@aws-cdk/pipelines/lib/pipeline.ts @@ -0,0 +1,313 @@ +import * as codepipeline from '@aws-cdk/aws-codepipeline'; +import { App, CfnOutput, Construct, Stack, Stage } from '@aws-cdk/core'; +import * as path from 'path'; +import { AssetType, DeployCdkStackAction, PublishAssetsAction, UpdatePipelineAction } from './actions'; +import { appOf, assemblyBuilderOf } from './private/construct-internals'; +import { AddStageOptions, AssetPublishingCommand, CdkStage, StackOutput } from './stage'; + +/** + * Properties for a CdkPipeline + */ +export interface CdkPipelineProps { + /** + * The CodePipeline action used to retrieve the CDK app's source + */ + readonly sourceAction: codepipeline.IAction; + + /** + * The CodePipeline action build and synthesis step of the CDK app + */ + 
readonly synthAction: codepipeline.IAction; + + /** + * The artifact you have defined to be the artifact to hold the cloudAssemblyArtifact for the synth action + */ + readonly cloudAssemblyArtifact: codepipeline.Artifact; + + /** + * Name of the pipeline + * + * @default - A name is automatically generated + */ + readonly pipelineName?: string; + + /** + * CDK CLI version to use in pipeline + * + * Some Actions in the pipeline will download and run a version of the CDK + * CLI. Specify the version here. + * + * @default - Latest version + */ + readonly cdkCliVersion?: string; +} + +/** + * A Pipeline to deploy CDK apps + * + * Defines an AWS CodePipeline-based Pipeline to deploy CDK applications. + * + * Automatically manages the following: + * + * - Stack dependency order. + * - Asset publishing. + * - Keeping the pipeline up-to-date as the CDK apps change. + * - Using stack outputs later on in the pipeline. + */ +export class CdkPipeline extends Construct { + private readonly _pipeline: codepipeline.Pipeline; + private readonly _assets: AssetPublishing; + private readonly _stages: CdkStage[] = []; + private readonly _outputArtifacts: Record = {}; + private readonly _cloudAssemblyArtifact: codepipeline.Artifact; + + constructor(scope: Construct, id: string, props: CdkPipelineProps) { + super(scope, id); + + if (!App.isApp(this.node.root)) { + throw new Error('CdkPipeline must be created under an App'); + } + + this._cloudAssemblyArtifact = props.cloudAssemblyArtifact; + const pipelineStack = Stack.of(this); + + this._pipeline = new codepipeline.Pipeline(this, 'Pipeline', { + ...props, + restartExecutionOnUpdate: true, + stages: [ + { + stageName: 'Source', + actions: [props.sourceAction], + }, + { + stageName: 'Build', + actions: [props.synthAction], + }, + { + stageName: 'UpdatePipeline', + actions: [new UpdatePipelineAction(this, 'UpdatePipeline', { + cloudAssemblyInput: this._cloudAssemblyArtifact, + pipelineStackName: pipelineStack.stackName, + cdkCliVersion: 
props.cdkCliVersion, + projectName: maybeSuffix(props.pipelineName, '-selfupdate'), + })], + }, + ], + }); + + this._assets = new AssetPublishing(this, 'Assets', { + cloudAssemblyInput: this._cloudAssemblyArtifact, + cdkCliVersion: props.cdkCliVersion, + pipeline: this._pipeline, + projectName: maybeSuffix(props.pipelineName, '-publish'), + }); + } + + /** + * Add pipeline stage that will deploy the given application stage + * + * The application construct should subclass `Stage` and can contain any + * number of `Stacks` inside it that may have dependency relationships + * on one another. + * + * All stacks in the application will be deployed in the appropriate order, + * and all assets found in the application will be added to the asset + * publishing stage. + */ + public addApplicationStage(appStage: Stage, options: AddStageOptions = {}): CdkStage { + const stage = this.addStage(appStage.stageName); + stage.addApplication(appStage, options); + return stage; + } + + /** + * Add a new, empty stage to the pipeline + * + * Prefer to use `addApplicationStage` if you are intended to deploy a CDK + * application, but you can use this method if you want to add other kinds of + * Actions to a pipeline. + */ + public addStage(stageName: string) { + const pipelineStage = this._pipeline.addStage({ + stageName, + }); + + const stage = new CdkStage(this, stageName, { + cloudAssemblyArtifact: this._cloudAssemblyArtifact, + pipelineStage, + stageName, + host: { + publishAsset: this._assets.addPublishAssetAction.bind(this._assets), + stackOutputArtifact: (artifactId) => this._outputArtifacts[artifactId], + }, + }); + this._stages.push(stage); + return stage; + } + + /** + * Get the StackOutput object that holds this CfnOutput's value in this pipeline + * + * `StackOutput` can be used in validation actions later in the pipeline. 
+ */ + public stackOutput(cfnOutput: CfnOutput): StackOutput { + const stack = Stack.of(cfnOutput); + + if (!this._outputArtifacts[stack.artifactId]) { + // We should have stored the ArtifactPath in the map, but its Artifact + // property isn't publicly readable... + this._outputArtifacts[stack.artifactId] = new codepipeline.Artifact(`Artifact_${stack.artifactId}_Outputs`); + } + + return new StackOutput(this._outputArtifacts[stack.artifactId].atPath('outputs.json'), cfnOutput.logicalId); + } + + /** + * Validate that we don't have any stacks violating dependency order in the pipeline + * + * Our own convenience methods will never generate a pipeline that does that (although + * this is a nice verification), but a user can also add the stacks by hand. + */ + protected validate(): string[] { + const ret = new Array(); + + ret.push(...this.validateDeployOrder()); + ret.push(...this.validateRequestedOutputs()); + + return ret; + } + + protected onPrepare() { + super.onPrepare(); + + // TODO: Support this in a proper way in the upstream library. For now, we + // "un-add" the Assets stage if it turns out to be empty. + this._assets.removeAssetsStageIfEmpty(); + } + + /** + * Return all StackDeployActions in an ordered list + */ + private get stackActions(): DeployCdkStackAction[] { + return flatMap(this._pipeline.stages, s => s.actions.filter(isDeployAction)); + } + + private* validateDeployOrder(): IterableIterator { + const stackActions = this.stackActions; + for (const stackAction of stackActions) { + // For every dependency, it must be executed in an action before this one is prepared. 
+ for (const depId of stackAction.dependencyStackArtifactIds) { + const depAction = stackActions.find(s => s.stackArtifactId === depId); + + if (depAction === undefined) { + this.node.addWarning(`Stack '${stackAction.stackName}' depends on stack ` + + `'${depId}', but that dependency is not deployed through the pipeline!`); + } else if (!(depAction.executeRunOrder < stackAction.prepareRunOrder)) { + yield `Stack '${stackAction.stackName}' depends on stack ` + + `'${depAction.stackName}', but is deployed before it in the pipeline!`; + } + } + } + } + + private* validateRequestedOutputs(): IterableIterator { + const artifactIds = this.stackActions.map(s => s.stackArtifactId); + + for (const artifactId of Object.keys(this._outputArtifacts)) { + if (!artifactIds.includes(artifactId)) { + yield `Trying to use outputs for Stack '${artifactId}', but Stack is not deployed in this pipeline. Add it to the pipeline.`; + } + } + } +} + +function isDeployAction(a: codepipeline.IAction): a is DeployCdkStackAction { + return a instanceof DeployCdkStackAction; +} + +function flatMap(xs: A[], f: (x: A) => B[]): B[] { + return Array.prototype.concat([], ...xs.map(f)); +} + +interface AssetPublishingProps { + readonly cloudAssemblyInput: codepipeline.Artifact; + readonly pipeline: codepipeline.Pipeline; + readonly cdkCliVersion?: string; + readonly projectName?: string; +} + +/** + * Add appropriate publishing actions to the asset publishing stage + */ +class AssetPublishing extends Construct { + private readonly publishers: Record = {}; + private readonly myCxAsmRoot: string; + + private readonly stage: codepipeline.IStage; + private _fileAssetCtr = 1; + private _dockerAssetCtr = 1; + + constructor(scope: Construct, id: string, private readonly props: AssetPublishingProps) { + super(scope, id); + this.myCxAsmRoot = path.resolve(assemblyBuilderOf(appOf(this)).outdir); + + // We MUST add the Stage immediately here, otherwise it will be in the wrong place + // in the pipeline! 
+ this.stage = this.props.pipeline.addStage({ stageName: 'Assets' }); + } + + /** + * Make sure there is an action in the stage to publish the given asset + * + * Assets are grouped by asset ID (which represent individual assets) so all assets + * are published in parallel. For each asset, all destinations are published sequentially + * so that we can reuse expensive operations between them (mostly: building a Docker image). + */ + public addPublishAssetAction(command: AssetPublishingCommand) { + // FIXME: this is silly, we need the relative path here but no easy way to get it + const relativePath = path.relative(this.myCxAsmRoot, command.assetManifestPath); + + let action = this.publishers[command.assetId]; + if (!action) { + // The asset ID would be a logical candidate for the construct path and project names, but if the asset + // changes it leads to recreation of a number of Role/Policy/Project resources which is slower than + // necessary. Number sequentially instead. + // + // FIXME: The ultimate best solution is probably to generate a single Project per asset type + // and reuse that for all assets. + + const id = command.assetType === AssetType.FILE ?
`FileAsset${this._fileAssetCtr++}` : `DockerAsset${this._dockerAssetCtr++}`; + + action = this.publishers[command.assetId] = new PublishAssetsAction(this, id, { + actionName: command.assetId, + cloudAssemblyInput: this.props.cloudAssemblyInput, + cdkCliVersion: this.props.cdkCliVersion, + assetType: command.assetType, + }); + this.stage.addAction(action); + } + + action.addPublishCommand(relativePath, command.assetSelector); + } + + /** + * Remove the Assets stage if it turns out we didn't add any Assets to publish + */ + public removeAssetsStageIfEmpty() { + if (Object.keys(this.publishers).length === 0) { + // Hacks to get access to innards of Pipeline + // Modify 'stages' array in-place to remove Assets stage if empty + const stages: codepipeline.IStage[] = (this.props.pipeline as any)._stages; + + const ix = stages.indexOf(this.stage); + if (ix > -1) { + stages.splice(ix, 1); + } + } + } +} + +function maybeSuffix(x: string | undefined, suffix: string): string | undefined { + if (x === undefined) { return undefined; } + return `${x}${suffix}`; +} \ No newline at end of file diff --git a/packages/@aws-cdk/pipelines/lib/private/asset-manifest.ts b/packages/@aws-cdk/pipelines/lib/private/asset-manifest.ts new file mode 100644 index 0000000000000..752c7c242bc48 --- /dev/null +++ b/packages/@aws-cdk/pipelines/lib/private/asset-manifest.ts @@ -0,0 +1,296 @@ +// FIXME: copied from `cdk-assets`, because this tool needs to read the asset manifest as well.
+import { AssetManifest, DockerImageDestination, DockerImageSource, FileDestination, FileSource, Manifest } from '@aws-cdk/cloud-assembly-schema'; +import * as fs from 'fs'; +import * as path from 'path'; + +/** + * A manifest of assets + */ +export class AssetManifestReader { + /** + * The default name of the asset manifest in a cdk.out directory + */ + public static readonly DEFAULT_FILENAME = 'assets.json'; + + /** + * Load an asset manifest from the given file + */ + public static fromFile(fileName: string) { + try { + const obj = Manifest.loadAssetManifest(fileName); + + return new AssetManifestReader(path.dirname(fileName), obj); + } catch (e) { + throw new Error(`Cannot read asset manifest '${fileName}': ${e.message}`); + } + } + + /** + * Load an asset manifest from the given file or directory + * + * If the argument given is a directory, the default asset file name will be used. + */ + public static fromPath(filePath: string) { + let st; + try { + st = fs.statSync(filePath); + } catch (e) { + throw new Error(`Cannot read asset manifest at '${filePath}': ${e.message}`); + } + if (st.isDirectory()) { + return AssetManifestReader.fromFile(path.join(filePath, AssetManifestReader.DEFAULT_FILENAME)); + } + return AssetManifestReader.fromFile(filePath); + } + + /** + * The directory where the manifest was found + */ + public readonly directory: string; + + constructor(directory: string, private readonly manifest: AssetManifest) { + this.directory = directory; + } + + /** + * Select a subset of assets and destinations from this manifest. + * + * Only assets with at least 1 selected destination are retained. + * + * If selection is not given, everything is returned.
+ */ + public select(selection?: DestinationPattern[]): AssetManifestReader { + if (selection === undefined) { return this; } + + const ret: AssetManifest & Required> + = { version: this.manifest.version, dockerImages: {}, files: {} }; + + for (const assetType of ASSET_TYPES) { + for (const [assetId, asset] of Object.entries(this.manifest[assetType] || {})) { + const filteredDestinations = filterDict( + asset.destinations, + (_, destId) => selection.some(sel => sel.matches(new DestinationIdentifier(assetId, destId)))); + + if (Object.keys(filteredDestinations).length > 0) { + ret[assetType][assetId] = { + ...asset, + destinations: filteredDestinations, + }; + } + } + } + + return new AssetManifestReader(this.directory, ret); + } + + /** + * Describe the asset manifest as a list of strings + */ + public list() { + return [ + ...describeAssets('file', this.manifest.files || {}), + ...describeAssets('docker-image', this.manifest.dockerImages || {}), + ]; + + function describeAssets(type: string, assets: Record }>) { + const ret = new Array(); + for (const [assetId, asset] of Object.entries(assets || {})) { + ret.push(`${assetId} ${type} ${JSON.stringify(asset.source)}`); + + const destStrings = Object.entries(asset.destinations).map(([destId, dest]) => ` ${assetId}:${destId} ${JSON.stringify(dest)}`); + ret.push(...prefixTreeChars(destStrings, ' ')); + } + return ret; + } + } + + /** + * List of assets, splat out to destinations + */ + public get entries(): IManifestEntry[] { + return [ + ...makeEntries(this.manifest.files || {}, FileManifestEntry), + ...makeEntries(this.manifest.dockerImages || {}, DockerImageManifestEntry), + ]; + + function makeEntries( + assets: Record }>, + ctor: new (id: DestinationIdentifier, source: A, destination: B) => C): C[] { + + const ret = new Array(); + for (const [assetId, asset] of Object.entries(assets)) { + for (const [destId, destination] of Object.entries(asset.destinations)) { + ret.push(new ctor(new 
DestinationIdentifier(assetId, destId), asset.source, destination)); + } + } + return ret; + } + } +} + +type AssetType = 'files' | 'dockerImages'; + +const ASSET_TYPES: AssetType[] = ['files', 'dockerImages']; + +/** + * A single asset from an asset manifest + */ +export interface IManifestEntry { + /** + * The identifier of the asset + */ + readonly id: DestinationIdentifier; + + /** + * The type of asset + */ + readonly type: string; + + /** + * Type-dependent source data + */ + readonly genericSource: unknown; + + /** + * Type-dependent destination data + */ + readonly genericDestination: unknown; +} + +/** + * A manifest entry for a file asset + */ +export class FileManifestEntry implements IManifestEntry { + public readonly genericSource: unknown; + public readonly genericDestination: unknown; + public readonly type = 'file'; + + constructor( + /** Identifier for this asset */ + public readonly id: DestinationIdentifier, + /** Source of the file asset */ + public readonly source: FileSource, + /** Destination for the file asset */ + public readonly destination: FileDestination, + ) { + this.genericSource = source; + this.genericDestination = destination; + } +} + +/** + * A manifest entry for a docker image asset + */ +export class DockerImageManifestEntry implements IManifestEntry { + public readonly genericSource: unknown; + public readonly genericDestination: unknown; + public readonly type = 'docker-image'; + + constructor( + /** Identifier for this asset */ + public readonly id: DestinationIdentifier, + /** Source of the docker image asset */ + public readonly source: DockerImageSource, + /** Destination for the docker image asset */ + public readonly destination: DockerImageDestination, + ) { + this.genericSource = source; + this.genericDestination = destination; + } +} + +/** + * Identify an asset destination in an asset manifest + */ +export class DestinationIdentifier { + /** + * Identifies the asset, by source.
+ */ + public readonly assetId: string; + + /** + * Identifies the destination where this asset will be published + */ + public readonly destinationId: string; + + constructor(assetId: string, destinationId: string) { + this.assetId = assetId; + this.destinationId = destinationId; + } + + /** + * Return a string representation for this asset identifier + */ + public toString() { + return this.destinationId ? `${this.assetId}:${this.destinationId}` : this.assetId; + } +} + +function filterDict(xs: Record, pred: (x: A, key: string) => boolean): Record { + const ret: Record = {}; + for (const [key, value] of Object.entries(xs)) { + if (pred(value, key)) { + ret[key] = value; + } + } + return ret; +} + +/** + * A filter pattern for a destination identifier + */ +export class DestinationPattern { + /** + * Parse a ':'-separated string into an asset/destination identifier + */ + public static parse(s: string) { + if (!s) { throw new Error('Empty string is not a valid destination identifier'); } + const parts = s.split(':').map(x => x !== '*' ? x : undefined); + if (parts.length === 1) { return new DestinationPattern(parts[0]); } + if (parts.length === 2) { return new DestinationPattern(parts[0] || undefined, parts[1] || undefined); } + throw new Error(`Asset identifier must contain at most 2 ':'-separated parts, got '${s}'`); + } + + /** + * Identifies the asset, by source.
+ */ + public readonly assetId?: string; + + /** + * Identifies the destination where this asset will be published + */ + public readonly destinationId?: string; + + constructor(assetId?: string, destinationId?: string) { + this.assetId = assetId; + this.destinationId = destinationId; + } + + /** + * Whether or not this pattern matches the given identifier + */ + public matches(id: DestinationIdentifier) { + return (this.assetId === undefined || this.assetId === id.assetId) + && (this.destinationId === undefined || this.destinationId === id.destinationId); + } + + /** + * Return a string representation for this asset identifier + */ + public toString() { + return `${this.assetId ?? '*'}:${this.destinationId ?? '*'}`; + } +} + +/** + * Prefix box-drawing characters to make lines look like a hanging tree + */ +function prefixTreeChars(xs: string[], prefix = '') { + const ret = new Array(); + for (let i = 0; i < xs.length; i++) { + const isLast = i === xs.length - 1; + const boxChar = isLast ? '└' : '├'; + ret.push(`${prefix}${boxChar}${xs[i]}`); + } + return ret; +} diff --git a/packages/@aws-cdk/pipelines/lib/private/construct-internals.ts b/packages/@aws-cdk/pipelines/lib/private/construct-internals.ts new file mode 100644 index 0000000000000..13b1ac7c1dd0c --- /dev/null +++ b/packages/@aws-cdk/pipelines/lib/private/construct-internals.ts @@ -0,0 +1,37 @@ +/** + * Get access to construct internals that we need but got removed from the Stages PR. 
+ */ +import { App, IConstruct, Stage } from '@aws-cdk/core'; +import * as cxapi from '@aws-cdk/cx-api'; +import * as path from 'path'; + +export function appOf(construct: IConstruct): App { + const root = construct.node.root; + + if (!App.isApp(root)) { + throw new Error(`Construct must be created under an App, but is not: ${construct.node.path}`); + } + + return root; +} + +export function assemblyBuilderOf(stage: Stage): cxapi.CloudAssemblyBuilder { + return (stage as any)._assemblyBuilder; +} + +/** + * Return the relative path from the app assembly to the scope's (nested) assembly + */ +export function embeddedAsmPath(scope: IConstruct) { + const appAsmRoot = assemblyBuilderOf(appOf(scope)).outdir; + const stage = Stage.of(scope) ?? appOf(scope); + const stageAsmRoot = assemblyBuilderOf(stage).outdir; + return path.relative(appAsmRoot, stageAsmRoot) || '.'; +} + +/** + * Determine the directory where the cloud assembly will be written, for use in a BuildSpec + */ +export function cloudAssemblyBuildSpecDir(scope: IConstruct) { + return assemblyBuilderOf(appOf(scope)).outdir; +} \ No newline at end of file diff --git a/packages/@aws-cdk/pipelines/lib/private/toposort.ts b/packages/@aws-cdk/pipelines/lib/private/toposort.ts new file mode 100644 index 0000000000000..8386a6d26bb82 --- /dev/null +++ b/packages/@aws-cdk/pipelines/lib/private/toposort.ts @@ -0,0 +1,47 @@ +export type KeyFunc = (x: T) => string; +export type DepFunc = (x: T) => string[]; + +/** + * Return a topological sort of all elements of xs, according to the given dependency functions + * + * Dependencies outside the referenced set are ignored. + * + * Not a stable sort, but in order to keep the order as stable as possible, we'll sort by key + * among elements of equal precedence. + * + * Returns tranches of elements of equal precedence. 
+ */ +export function topologicalSort(xs: Iterable, keyFn: KeyFunc, depFn: DepFunc): T[][] { + const remaining = new Map>(); + for (const element of xs) { + const key = keyFn(element); + remaining.set(key, { key, element, dependencies: depFn(element) }); + } + + const ret = new Array(); + while (remaining.size > 0) { + // All elements with no more deps in the set can be ordered + const selectable = Array.from(remaining.values()).filter(e => e.dependencies.every(d => !remaining.has(d))); + + selectable.sort((a, b) => a.key < b.key ? -1 : b.key < a.key ? 1 : 0); + + // If we didn't make any progress, we got stuck + if (selectable.length === 0) { + throw new Error(`Could not determine ordering between: ${Array.from(remaining.keys()).join(', ')}`); + } + + ret.push(selectable.map(s => s.element)); + + for (const selected of selectable) { + remaining.delete(selected.key); + } + } + + return ret; +} + +interface TopoElement { + key: string; + dependencies: string[]; + element: T; +} diff --git a/packages/@aws-cdk/pipelines/lib/stage.ts b/packages/@aws-cdk/pipelines/lib/stage.ts new file mode 100644 index 0000000000000..2441da072cede --- /dev/null +++ b/packages/@aws-cdk/pipelines/lib/stage.ts @@ -0,0 +1,388 @@ +import * as codepipeline from '@aws-cdk/aws-codepipeline'; +import * as cpactions from '@aws-cdk/aws-codepipeline-actions'; +import { Construct, Stage } from '@aws-cdk/core'; +import * as cxapi from '@aws-cdk/cx-api'; +import { AssetType, DeployCdkStackAction } from './actions'; +import { AssetManifestReader, DockerImageManifestEntry, FileManifestEntry } from './private/asset-manifest'; +import { topologicalSort } from './private/toposort'; + +/** + * Construction properties for a CdkStage + */ +export interface CdkStageProps { + /** + * Name of the stage that should be created + */ + readonly stageName: string; + + /** + * The underlying Pipeline Stage associated with thisCdkStage + */ + readonly pipelineStage: codepipeline.IStage; + + /** + * The CodePipeline 
Artifact with the Cloud Assembly + */ + readonly cloudAssemblyArtifact: codepipeline.Artifact; + + /** + * Features the Stage needs from its environment + */ + readonly host: IStageHost; +} + +/** + * Stage in a CdkPipeline + * + * You don't need to instantiate this class directly. Use + * `cdkPipeline.addStage()` instead. + */ +export class CdkStage extends Construct { + private _nextSequentialRunOrder = 1; // Must start at 1 eh + private _manualApprovalCounter = 1; + private readonly pipelineStage: codepipeline.IStage; + private readonly cloudAssemblyArtifact: codepipeline.Artifact; + private readonly stacksToDeploy = new Array(); + private readonly stageName: string; + private readonly host: IStageHost; + private _prepared = false; + + constructor(scope: Construct, id: string, props: CdkStageProps) { + super(scope, id); + + this.stageName = props.stageName; + this.pipelineStage = props.pipelineStage; + this.cloudAssemblyArtifact = props.cloudAssemblyArtifact; + this.host = props.host; + } + + /** + * Add all stacks in the application Stage to this stage + * + * The application construct should subclass `Stage` and can contain any + * number of `Stacks` inside it that may have dependency relationships + * on one another. + * + * All stacks in the application will be deployed in the appropriate order, + * and all assets found in the application will be added to the asset + * publishing stage. + */ + public addApplication(appStage: Stage, options: AddStageOptions = {}) { + const asm = appStage.synth(); + + const sortedTranches = topologicalSort(asm.stacks, + stack => stack.id, + stack => stack.dependencies.map(d => d.id)); + + for (const stacks of sortedTranches) { + const runOrder = this.nextSequentialRunOrder(2); // We need 2 actions + let executeRunOrder = runOrder + 1; + + // If we need to insert a manual approval action, then what's the executeRunOrder + // now is where we add a manual approval step, and we allocate 1 more runOrder + // for the execute. 
+ if (options.manualApprovals) { + this.addManualApprovalAction({ runOrder: executeRunOrder }); + executeRunOrder = this.nextSequentialRunOrder(); + } + + // These don't have a dependency on each other, so can all be added in parallel + for (const stack of stacks) { + this.addStackArtifactDeployment(stack, { runOrder, executeRunOrder }); + } + } + } + + /** + * Add a deployment action based on a stack artifact + */ + public addStackArtifactDeployment(stackArtifact: cxapi.CloudFormationStackArtifact, options: AddStackOptions = {}) { + // Get all assets manifests and add the assets in 'em to the asset publishing stage. + this.publishAssetDependencies(stackArtifact); + + // Remember for later, see 'prepare()' + // We know that deploying a stack is going to take up 2 runorder slots later on. + const runOrder = options.runOrder ?? this.nextSequentialRunOrder(2); + const executeRunOrder = options.executeRunOrder ?? runOrder + 1; + this.stacksToDeploy.push({ + prepareRunOrder: runOrder, + executeRunOrder, + stackArtifact, + }); + + this.advanceRunOrderPast(runOrder); + this.advanceRunOrderPast(executeRunOrder); + } + + /** + * Add a manual approval action + * + * If you need more flexibility than what this method offers, + * use `addAction` with a `ManualApprovalAction`. + */ + public addManualApprovalAction(options: AddManualApprovalOptions = {}) { + let actionName = options.actionName; + if (!actionName) { + actionName = `ManualApproval${this._manualApprovalCounter > 1 ? this._manualApprovalCounter : ''}`; + this._manualApprovalCounter += 1; + } + + this.addActions(new cpactions.ManualApprovalAction({ + actionName, + runOrder: options.runOrder ?? this.nextSequentialRunOrder(), + })); + } + + /** + * Add one or more CodePipeline Actions + * + * You need to make sure it is created with the right runOrder. Call `nextSequentialRunOrder()` + * for every action to get actions to execute in sequence. 
+ */ + public addActions(...actions: codepipeline.IAction[]) { + for (const action of actions) { + this.pipelineStage.addAction(action); + } + } + + /** + * Return the runOrder number necessary to run the next Action in sequence with the rest + * + * FIXME: This is here because Actions are immutable and can't be reordered + * after creation, nor is there a way to specify relative priorities, which + * is a limitation that we should take away in the base library. + */ + public nextSequentialRunOrder(count: number = 1): number { + const ret = this._nextSequentialRunOrder; + this._nextSequentialRunOrder += count; + return ret; + } + + /** + * Whether this Stage contains an action to deploy the given stack, identified by its artifact ID + */ + public deploysStack(artifactId: string) { + return this.stacksToDeploy.map(s => s.stackArtifact.id).includes(artifactId); + } + + /** + * Actually add all the DeployStack actions to the stage. + * + * We do this late because before we can render the actual DeployActions, + * we need to know whether or not we need to capture the stack outputs. + * + * FIXME: This is here because Actions are immutable and can't be reordered + * after creation, nor is there a way to specify relative priorities, which + * is a limitation that we should take away in the base library. + */ + protected prepare() { + // FIXME: Make sure this only gets run once. There seems to be an issue in the reconciliation + // loop that may trigger this more than once if it throws an error somewhere, and the exception + // that gets thrown here will then override the actual failure. 
+ if (this._prepared) { return; } + this._prepared = true; + + for (const { prepareRunOrder: runOrder, stackArtifact } of this.stacksToDeploy) { + const artifact = this.host.stackOutputArtifact(stackArtifact.id); + + this.pipelineStage.addAction(DeployCdkStackAction.fromStackArtifact(this, stackArtifact, { + baseActionName: this.simplifyStackName(stackArtifact.stackName), + cloudAssemblyInput: this.cloudAssemblyArtifact, + output: artifact, + outputFileName: artifact ? 'outputs.json' : undefined, + prepareRunOrder: runOrder, + })); + } + } + + /** + * Advance the runorder counter so that the next sequential number is higher than the given one + */ + private advanceRunOrderPast(lastUsed: number) { + this._nextSequentialRunOrder = Math.max(lastUsed + 1, this._nextSequentialRunOrder); + } + + /** + * Simplify the stack name by removing the `Stage-` prefix if it exists. + */ + private simplifyStackName(s: string) { + return stripPrefix(s, `${this.stageName}-`); + } + + /** + * Make sure all assets depended on by this stack are published in this pipeline + * + * Taking care to exclude the stack template itself -- it is being published + * as an asset because the CLI needs to know the asset publishing role when + * pushing the template to S3, but in the case of CodePipeline we always + * reference the template from the artifact bucket. + * + * (NOTE: this is only true for top-level stacks, not nested stacks. Nested + * Stack templates are always published as assets). 
+ */ + private publishAssetDependencies(stackArtifact: cxapi.CloudFormationStackArtifact) { + const assetManifests = stackArtifact.dependencies.filter(isAssetManifest); + + for (const manifestArtifact of assetManifests) { + const manifest = AssetManifestReader.fromFile(manifestArtifact.file); + + for (const entry of manifest.entries) { + let assetType: AssetType; + if (entry instanceof DockerImageManifestEntry) { + assetType = AssetType.DOCKER_IMAGE; + } else if (entry instanceof FileManifestEntry) { + // Don't publish the template for this stack + if (entry.source.packaging === 'file' && entry.source.path === stackArtifact.templateFile) { + continue; + } + + assetType = AssetType.FILE; + } else { + throw new Error(`Unrecognized asset type: ${entry.type}`); + } + + this.host.publishAsset({ + assetManifestPath: manifestArtifact.file, + assetId: entry.id.assetId, + assetSelector: entry.id.toString(), + assetType, + }); + } + } + } +} + +/** + * Additional options for adding a stack deployment + */ +export interface AddStackOptions { + /** + * Base runorder + * + * @default - Next sequential runorder + */ + readonly runOrder?: number; + + /** + * Runorder for the execute action + * + * @default - runOrder + 1 + */ + readonly executeRunOrder?: number; +} + +/** + * A single output of a Stack + */ +export class StackOutput { + /** + * The artifact and file the output is stored in + */ + public readonly artifactFile: codepipeline.ArtifactPath; + + /** + * The name of the output in the JSON object in the file + */ + public readonly outputName: string; + + /** + * Build a StackOutput from a known artifact and an output name + */ + constructor(artifactFile: codepipeline.ArtifactPath, outputName: string) { + this.artifactFile = artifactFile; + this.outputName = outputName; + } +} + +function stripPrefix(s: string, prefix: string) { + return s.startsWith(prefix) ? 
s.substr(prefix.length) : s; +} + +function isAssetManifest(s: cxapi.CloudArtifact): s is cxapi.AssetManifestArtifact { + // instanceof is too risky, and we're at a too late stage to properly fix. + // return s instanceof cxapi.AssetManifestArtifact; + return s.constructor.name === 'AssetManifestArtifact'; +} + +/** + * Features that the Stage needs from its environment + */ +export interface IStageHost { + /** + * Make sure all the assets from the given manifest are published + */ + publishAsset(command: AssetPublishingCommand): void; + + /** + * Return the Artifact the given stack has to emit its outputs into, if any + */ + stackOutputArtifact(stackArtifactId: string): codepipeline.Artifact | undefined; +} + +/** + * Instructions to publish certain assets + */ +export interface AssetPublishingCommand { + /** + * Asset manifest path + */ + readonly assetManifestPath: string; + + /** + * Asset identifier + */ + readonly assetId: string; + + /** + * Asset selector to pass to `cdk-assets`. + */ + readonly assetSelector: string; + + /** + * Type of asset to publish + */ + readonly assetType: AssetType; +} + +/** + * Options for adding an application stage to a pipeline + */ +export interface AddStageOptions { + /** + * Add manual approvals before executing change sets + * + * This gives humans the opportunity to confirm the change set looks alright + * before deploying it. 
+ * + * @default false + */ + readonly manualApprovals?: boolean; +} + +/** + * Options for addManualApproval + */ +export interface AddManualApprovalOptions { + /** + * The name of the manual approval action + * + * @default 'ManualApproval' with a rolling counter + */ + readonly actionName?: string; + + /** + * The runOrder for this action + * + * @default - The next sequential runOrder + */ + readonly runOrder?: number; +} + +/** + * Queued "deploy stack" command that is reified during prepare() + */ +interface DeployStackCommand { + prepareRunOrder: number; + executeRunOrder: number; + stackArtifact: cxapi.CloudFormationStackArtifact; +} \ No newline at end of file diff --git a/packages/@aws-cdk/pipelines/lib/synths/_util.ts b/packages/@aws-cdk/pipelines/lib/synths/_util.ts new file mode 100644 index 0000000000000..83f83bc802564 --- /dev/null +++ b/packages/@aws-cdk/pipelines/lib/synths/_util.ts @@ -0,0 +1,15 @@ +import * as codebuild from '@aws-cdk/aws-codebuild'; + +export function copyEnvironmentVariables(...names: string[]): Record { + const ret: Record = {}; + for (const name of names) { + if (process.env[name]) { + ret[name] = { value: process.env[name] }; + } + } + return ret; +} + +export function filterEmpty(xs: Array): string[] { + return xs.filter(x => x) as any; +} \ No newline at end of file diff --git a/packages/@aws-cdk/pipelines/lib/synths/index.ts b/packages/@aws-cdk/pipelines/lib/synths/index.ts new file mode 100644 index 0000000000000..4764f7d9647c6 --- /dev/null +++ b/packages/@aws-cdk/pipelines/lib/synths/index.ts @@ -0,0 +1 @@ +export * from './simple-synth-action'; \ No newline at end of file diff --git a/packages/@aws-cdk/pipelines/lib/synths/simple-synth-action.ts b/packages/@aws-cdk/pipelines/lib/synths/simple-synth-action.ts new file mode 100644 index 0000000000000..bebbed0f9f44d --- /dev/null +++ b/packages/@aws-cdk/pipelines/lib/synths/simple-synth-action.ts @@ -0,0 +1,353 @@ +import * as codebuild from '@aws-cdk/aws-codebuild'; 
+import * as codepipeline from '@aws-cdk/aws-codepipeline'; +import * as codepipeline_actions from '@aws-cdk/aws-codepipeline-actions'; +import * as events from '@aws-cdk/aws-events'; +import { Construct } from '@aws-cdk/core'; +import * as path from 'path'; +import { cloudAssemblyBuildSpecDir } from '../private/construct-internals'; +import { copyEnvironmentVariables, filterEmpty } from './_util'; + +/** + * Configuration options for a SimpleSynth + */ +export interface SimpleSynthOptions { + /** + * The source artifact of the CodePipeline + */ + readonly sourceArtifact: codepipeline.Artifact; + + /** + * The artifact where the CloudAssembly should be emitted + */ + readonly cloudAssemblyArtifact: codepipeline.Artifact; + + /** + * Environment variables to send into build + * + * @default - No additional environment variables + */ + readonly environmentVariables?: Record; + + /** + * Environment variables to copy over from parent env + * + * These are environment variables that are being used by the build. + * + * @default - No environment variables copied + */ + readonly copyEnvironmentVariables?: string[]; + + /** + * Name of the build action + * + * @default 'Synth' + */ + readonly actionName?: string; + + /** + * Name of the CodeBuild project + * + * @default - Automatically generated + */ + readonly projectName?: string; + + /** + * Build environment to use for CodeBuild job + * + * @default BuildEnvironment.LinuxBuildImage.STANDARD_1_0 + */ + readonly environment?: codebuild.BuildEnvironment; + + /** + * Directory inside the source where package.json and cdk.json are located + * + * @default - Repository root + */ + readonly subdirectory?: string; + + /** + * Produce additional output artifacts after the build based on the given directories + * + * Can be used to produce additional artifacts during the build step, + * separate from the cloud assembly, which can be used further on in the + * pipeline. 
+ * + * Directories are evaluated with respect to `subdirectory`. + * + * @default - No additional artifacts generated + */ + readonly additionalArtifacts?: AdditionalArtifact[]; +} + +/** + * Construction props for SimpleSynthAction + */ +export interface SimpleSynthActionProps extends SimpleSynthOptions { + /** + * The synth command + */ + readonly synthCommand: string; + + /** + * The install command + * + * @default - No install required + */ + readonly installCommand?: string; + + /** + * The build command + * + * By default, we assume NPM projects are either written in JavaScript or are + * using `ts-node`, so don't need a build command. + * + * Otherwise, put the build command here, for example `npm run build`. + * + * @default - No build required + */ + readonly buildCommand?: string; +} + +/** + * Specification of an additional artifact to generate + */ +export interface AdditionalArtifact { + /** + * Directory to be packaged + */ + readonly directory: string; + + /** + * Artifact to represent the build directory in the pipeline + */ + readonly artifact: codepipeline.Artifact; +} + +/** + * A standard synth with a generated buildspec + */ +export class SimpleSynthAction implements codepipeline.IAction { + + /** + * Create a standard NPM synth action + * + * Uses `npm ci` to install dependencies and `npx cdk synth` to synthesize. + * + * If you need a build step, add `buildCommand: 'npm run build'`. + */ + public static standardNpmSynth(options: StandardNpmSynthOptions) { + return new SimpleSynthAction({ + ...options, + installCommand: options.installCommand ?? 'npm ci', + synthCommand: options.synthCommand ?? 'npx cdk synth', + }); + } + + /** + * Create a standard Yarn synth action + * + * Uses `yarn install --frozen-lockfile` to install dependencies and `npx cdk synth` to synthesize. + * + * If you need a build step, add `buildCommand: 'yarn build'`. 
+ */ + public static standardYarnSynth(options: StandardYarnSynthOptions) { + return new SimpleSynthAction({ + ...options, + installCommand: options.installCommand ?? 'yarn install --frozen-lockfile', + synthCommand: options.synthCommand ?? 'npx cdk synth', + }); + } + + private _action?: codepipeline_actions.CodeBuildAction; + private _actionProperties: codepipeline.ActionProperties; + + constructor(private readonly props: SimpleSynthActionProps) { + // A number of actionProperties get read before bind() is even called (so before we + // have made the Project and can construct the actual CodeBuildAction) + // + // - actionName + // - resource + // - region + // - category + // - role + // - owner + this._actionProperties = { + actionName: props.actionName ?? 'Synth', + category: codepipeline.ActionCategory.BUILD, + provider: 'CodeBuild', + artifactBounds: { minInputs: 0, maxInputs: 5, minOutputs: 0, maxOutputs: 5 }, + inputs: [props.sourceArtifact], + outputs: [props.cloudAssemblyArtifact, ...(props.additionalArtifacts ?? []).map(a => a.artifact)], + }; + + const addls = props.additionalArtifacts ?? 
[]; + if (Object.keys(addls).length > 0) { + if (!props.cloudAssemblyArtifact.artifactName) { + throw new Error('You must give all output artifacts, including the \'cloudAssemblyArtifact\', names when using \'additionalArtifacts\''); + } + for (const addl of addls) { + if (!addl.artifact.artifactName) { + throw new Error('You must give all output artifacts passed to SimpleSynthAction names when using \'additionalArtifacts\''); + } + } + } + } + + /** + * Exists to implement IAction + */ + public get actionProperties(): codepipeline.ActionProperties { + return this._actionProperties; + } + + /** + * Exists to implement IAction + */ + public bind(scope: Construct, stage: codepipeline.IStage, options: codepipeline.ActionBindOptions): codepipeline.ActionConfig { + const buildCommand = this.props.buildCommand; + const synthCommand = this.props.synthCommand; + const installCommand = this.props.installCommand; + + const project = new codebuild.PipelineProject(scope, 'CdkBuildProject', { + projectName: this.props.projectName, + environment: this.props.environment, + buildSpec: codebuild.BuildSpec.fromObject({ + version: '0.2', + phases: { + pre_build: { + commands: filterEmpty([ + this.props.subdirectory ? `cd ${this.props.subdirectory}` : '', + installCommand, + ]), + }, + build: { + commands: filterEmpty([ + buildCommand, + synthCommand, + ]), + }, + }, + artifacts: renderArtifacts(this), + }), + environmentVariables: { + ...copyEnvironmentVariables(...this.props.copyEnvironmentVariables || []), + ...this.props.environmentVariables, + }, + }); + + this._action = new codepipeline_actions.CodeBuildAction({ + actionName: this.actionProperties.actionName, + input: this.props.sourceArtifact, + outputs: [this.props.cloudAssemblyArtifact, ...(this.props.additionalArtifacts ?? 
[]).map(a => a.artifact)], + project, + }); + this._actionProperties = this._action.actionProperties; + + return this._action.bind(scope, stage, options); + + function renderArtifacts(self: SimpleSynthAction) { + // save the generated files in the output artifact + // This part of the buildspec has to look completely different depending on whether we're + // using secondary artifacts or not. + + const cloudAsmArtifactSpec = { + 'base-directory': path.join(self.props.subdirectory ?? '.', cloudAssemblyBuildSpecDir(scope)), + 'files': '**/*', + }; + + if (self.props.additionalArtifacts) { + const secondary: Record = {}; + if (!self.props.cloudAssemblyArtifact.artifactName) { + throw new Error('When using additional output artifacts, you must also name the CloudAssembly artifact'); + } + secondary[self.props.cloudAssemblyArtifact.artifactName] = cloudAsmArtifactSpec; + self.props.additionalArtifacts.forEach((art) => { + if (!art.artifact.artifactName) { + throw new Error('You must give the output artifact a name'); + } + secondary[art.artifact.artifactName] = { + 'base-directory': path.join(self.props.subdirectory ?? '.', art.directory), + 'files': '**/*', + }; + }); + + return { 'secondary-artifacts': secondary }; + } + + return cloudAsmArtifactSpec; + } + } + + /** + * Exists to implement IAction + */ + public onStateChange(name: string, target?: events.IRuleTarget, options?: events.RuleProps): events.Rule { + if (!this._action) { + throw new Error('Need bind() first'); + } + + return this._action.onStateChange(name, target, options); + } +} + +/** + * Options for a convention-based synth using NPM + */ +export interface StandardNpmSynthOptions extends SimpleSynthOptions { + /** + * The install command + * + * @default 'npm ci' + */ + readonly installCommand?: string; + + /** + * The build command + * + * By default, we assume NPM projects are either written in JavaScript or are + * using `ts-node`, so don't need a build command. 
+ * + * Otherwise, put the build command here, for example `npm run build`. + * + * @default - No build required + */ + readonly buildCommand?: string; + + /** + * The synth command + * + * @default 'npx cdk synth' + */ + readonly synthCommand?: string; +} + +/** + * Options for a convention-based synth using Yarn + */ +export interface StandardYarnSynthOptions extends SimpleSynthOptions { + /** + * The install command + * + * @default 'yarn install --frozen-lockfile' + */ + readonly installCommand?: string; + + /** + * The build command + * + * By default, we assume NPM projects are either written in JavaScript or are + * using `ts-node`, so don't need a build command. + * + * Otherwise, put the build command here, for example `npm run build`. + * + * @default - No build required + */ + readonly buildCommand?: string; + + /** + * The synth command + * + * @default 'npx cdk synth' + */ + readonly synthCommand?: string; +} \ No newline at end of file diff --git a/packages/@aws-cdk/pipelines/lib/validation/_files.ts b/packages/@aws-cdk/pipelines/lib/validation/_files.ts new file mode 100644 index 0000000000000..2f2bbf7be35ea --- /dev/null +++ b/packages/@aws-cdk/pipelines/lib/validation/_files.ts @@ -0,0 +1,97 @@ +import * as codepipeline from '@aws-cdk/aws-codepipeline'; +import { IGrantable } from '@aws-cdk/aws-iam'; +import * as s3assets from '@aws-cdk/aws-s3-assets'; +import { Construct } from '@aws-cdk/core'; + +/** + * Additional files to use in a shell script + */ +export abstract class Files { + /** + * Use the files from a CodePipeline artifact + */ + public static fromArtifact(artifact: codepipeline.Artifact): Files { + if (!artifact) { + // Typechecking may mess up + throw new Error('Files.fromArtifact(): input artifact is required, got undefined'); + } + + return { + bind: () => ({ artifact }), + grantRead: () => { /* Not necessary */ }, + }; + } + + /** + * Create a new asset to bundle up the files in a directory on disk + */ + public static 
fromDirectory(directoryPath: string): Files { + let realFiles: Files; + return { + bind(scope: Construct) { + realFiles = Files.fromAsset(new s3assets.Asset(scope, directoryPath, { + path: directoryPath, + })); + + return realFiles.bind(scope); + }, + grantRead(grantee: IGrantable) { + if (!realFiles) { + throw new Error('bind() must be called first'); + } + realFiles.grantRead(grantee); + }, + }; + } + + /** + * Use an existing asset as a file source + */ + public static fromAsset(asset: s3assets.Asset): Files { + return { + bind: () => ({ + commands: [ + `echo "Downloading additional files from ${asset.s3ObjectUrl}"`, + `aws s3 cp ${asset.s3ObjectUrl} /tmp/dl.zip`, + 'unzip /tmp/dl.zip -d .', + ], + }), + grantRead: (grantee) => asset.grantRead(grantee), + }; + } + + protected constructor() { + } + + /** + * Bind the Files to a usage location + */ + public abstract bind(scope: Construct): FilesConfig; + + /** + * Grant read permissions to the file set to the given grantable + * + * Must be called after bind(). 
+ */ + + public abstract grantRead(grantee: IGrantable): void; +} + +/** + * Config for a Files source + */ +export interface FilesConfig { + /** + * CodePipeline artifact to add to the set of input artifacts for the project + * + * @default - No artifact + */ + readonly artifact?: codepipeline.Artifact; + + /** + * Commands to add to the set of commands for the project + * + * @default - No commands + */ + readonly commands?: string[]; +} diff --git a/packages/@aws-cdk/pipelines/lib/validation/index.ts b/packages/@aws-cdk/pipelines/lib/validation/index.ts new file mode 100644 index 0000000000000..f2751fc92af49 --- /dev/null +++ b/packages/@aws-cdk/pipelines/lib/validation/index.ts @@ -0,0 +1 @@ +export * from './shell-script-action'; \ No newline at end of file diff --git a/packages/@aws-cdk/pipelines/lib/validation/shell-script-action.ts b/packages/@aws-cdk/pipelines/lib/validation/shell-script-action.ts new file mode 100644 index 0000000000000..301e641cb15fa --- /dev/null +++ b/packages/@aws-cdk/pipelines/lib/validation/shell-script-action.ts @@ -0,0 +1,183 @@ +import * as codebuild from '@aws-cdk/aws-codebuild'; +import * as codepipeline from '@aws-cdk/aws-codepipeline'; +import * as codepipeline_actions from '@aws-cdk/aws-codepipeline-actions'; +import * as events from '@aws-cdk/aws-events'; +import { Construct } from '@aws-cdk/core'; +import { StackOutput } from '../stage'; + +/** + * Properties for ShellScriptValidation + */ +export interface ShellScriptActionProps { + /** + * Name of the validation action in the pipeline + */ + readonly actionName: string; + + /** + * Stack outputs to make available as environment variables + * + * @default - No outputs used + */ + readonly useOutputs?: Record; + + /** + * Commands to run + */ + readonly commands: string[]; + + /** + * Bash options to set at the start of the script + * + * @default '-eu' (errexit and nounset) + */ + readonly bashOptions?: string; + + /** + * Additional artifacts to use as input for the 
CodeBuild project + * + * You can use these files to load more complex test sets into the + * shellscript build environment. + * + * The files artifact given here will be unpacked into the current + * working directory, the other ones will be unpacked into directories + * which are available through the environment variables + * $CODEBUILD_SRC_DIR_. + * + * The CodeBuild job must have at least one input artifact, so you + * must provide either at least one additional artifact here or one + * stack output using `useOutput`. + * + * @default - No additional artifacts + */ + readonly additionalArtifacts?: codepipeline.Artifact[]; + + /** + * RunOrder for this action + * + * Use this to sequence the shell script after the deployments. + * + * The default value is 100 so you don't have to supply the value if you just + * want to run this after the application stacks have been deployed, and you + * don't have more than 100 stacks. + * + * @default 100 + */ + readonly runOrder?: number; +} + +/** + * Validate a revision using shell commands + */ +export class ShellScriptAction implements codepipeline.IAction { + private _project?: codebuild.IProject; + + private _action?: codepipeline_actions.CodeBuildAction; + private _actionProperties: codepipeline.ActionProperties; + + constructor(private readonly props: ShellScriptActionProps) { + // A number of actionProperties get read before bind() is even called (so before we + // have made the Project and can construct the actual CodeBuildAction) + // + // - actionName + // - resource + // - region + // - category + // - role + // - owner + this._actionProperties = { + actionName: props.actionName, + category: codepipeline.ActionCategory.BUILD, + provider: 'CodeBuild', + artifactBounds: { minInputs: 0, maxInputs: 5, minOutputs: 0, maxOutputs: 5 }, + inputs: [], + outputs: [], + }; + + if (Object.keys(props.useOutputs ?? {}).length + (props.additionalArtifacts ?? 
[]).length === 0) { + throw new Error('You must supply either \'useOutputs\' or \'additionalArtifacts\', since a CodeBuild Action must always have at least one input artifact.'); + } + } + + /** + * Exists to implement IAction + */ + public get actionProperties(): codepipeline.ActionProperties { + return this._actionProperties; + } + + /** + * Exists to implement IAction + */ + public bind(scope: Construct, stage: codepipeline.IStage, options: codepipeline.ActionBindOptions): codepipeline.ActionConfig { + const inputs = new Array(); + inputs.push(...this.props.additionalArtifacts ?? []); + + const envVarCommands = new Array(); + + const bashOptions = this.props.bashOptions ?? '-eu'; + if (bashOptions) { + envVarCommands.push(`set ${bashOptions}`); + } + for (const [varName, output] of Object.entries(this.props.useOutputs ?? {})) { + const outputArtifact = output.artifactFile; + + // Add the artifact to the list of inputs, if it's not in there already. Determine + // the location where CodeBuild is going to stick it based on whether it's the first (primary) + // input or an 'extra input', then parse. + let artifactIndex = inputs.findIndex(a => a.artifactName === outputArtifact.artifact.artifactName); + if (artifactIndex === -1) { + artifactIndex = inputs.push(outputArtifact.artifact) - 1; + } + const dirEnv = artifactIndex === 0 ? 
'CODEBUILD_SRC_DIR' : `CODEBUILD_SRC_DIR_${outputArtifact.artifact.artifactName}`; + envVarCommands.push(`export ${varName}="$(node -pe 'require(process.env.${dirEnv} + "/${outputArtifact.fileName}")["${output.outputName}"]')"`); + } + + this._project = new codebuild.PipelineProject(scope, 'Project', { + buildSpec: codebuild.BuildSpec.fromObject({ + version: '0.2', + phases: { + build: { + commands: [ + ...envVarCommands, + ...this.props.commands, + ], + }, + }, + }), + }); + + this._action = new codepipeline_actions.CodeBuildAction({ + actionName: this.props.actionName, + input: inputs[0], + extraInputs: inputs.slice(1), + runOrder: this.props.runOrder ?? 100, + project: this._project, + }); + // Replace the placeholder actionProperties at the last minute + this._actionProperties = this._action.actionProperties; + + return this._action.bind(scope, stage, options); + } + + /** + * Project generated to run the shell script in + */ + public get project(): codebuild.IProject { + if (!this._project) { + throw new Error('Project becomes available after ShellScriptAction has been bound to a stage'); + } + return this._project; + } + + /** + * Exists to implement IAction + */ + public onStateChange(name: string, target?: events.IRuleTarget, options?: events.RuleProps): events.Rule { + if (!this._action) { + throw new Error('Need bind() first'); + } + + return this._action.onStateChange(name, target, options); + } +} diff --git a/packages/@aws-cdk/pipelines/package.json b/packages/@aws-cdk/pipelines/package.json new file mode 100644 index 0000000000000..b6a5f4a51acfa --- /dev/null +++ b/packages/@aws-cdk/pipelines/package.json @@ -0,0 +1,119 @@ +{ + "name": "@aws-cdk/pipelines", + "version": "0.0.0", + "description": "Continuous Delivery of CDK applications", + "main": "lib/index.js", + "types": "lib/index.d.ts", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-cdk.git", + "directory": "packages/@aws-cdk/pipelines" + }, + "bin": {}, + "scripts": { + 
"build": "cdk-build", + "watch": "cdk-watch", + "lint": "cdk-lint", + "test": "cdk-test", + "integ": "cdk-integ", + "pkglint": "pkglint -f", + "package": "cdk-package", + "awslint": "cdk-awslint", + "build+test+package": "npm run build+test && npm run package", + "build+test": "npm run build && npm test", + "compat": "cdk-compat" + }, + "author": { + "name": "Amazon Web Services", + "url": "https://aws.amazon.com", + "organization": true + }, + "devDependencies": { + "@aws-cdk/assert": "0.0.0", + "@types/nodeunit": "^0.0.31", + "cdk-build-tools": "0.0.0", + "cdk-integ-tools": "0.0.0", + "cfn2ts": "0.0.0", + "nodeunit": "^0.11.3", + "pkglint": "0.0.0", + "@aws-cdk/aws-s3": "0.0.0", + "@aws-cdk/aws-ecr-assets": "0.0.0" + }, + "peerDependencies": { + "constructs": "^3.0.2", + "@aws-cdk/core": "0.0.0", + "@aws-cdk/aws-codebuild": "0.0.0", + "@aws-cdk/aws-codepipeline": "0.0.0", + "@aws-cdk/aws-codepipeline-actions": "0.0.0", + "@aws-cdk/aws-events": "0.0.0", + "@aws-cdk/aws-iam": "0.0.0", + "@aws-cdk/cloud-assembly-schema": "0.0.0", + "@aws-cdk/aws-s3-assets": "0.0.0", + "@aws-cdk/cx-api": "0.0.0", + "@aws-cdk/aws-cloudformation": "0.0.0" + }, + "dependencies": { + "constructs": "^3.0.2", + "@aws-cdk/core": "0.0.0", + "@aws-cdk/aws-codebuild": "0.0.0", + "@aws-cdk/aws-codepipeline": "0.0.0", + "@aws-cdk/aws-codepipeline-actions": "0.0.0", + "@aws-cdk/cloud-assembly-schema": "0.0.0", + "@aws-cdk/aws-events": "0.0.0", + "@aws-cdk/aws-iam": "0.0.0", + "@aws-cdk/aws-s3-assets": "0.0.0", + "@aws-cdk/cx-api": "0.0.0", + "@aws-cdk/aws-cloudformation": "0.0.0" + }, + "bundledDependencies": [], + "keywords": [ + "aws", + "cdk", + "constructs", + "pipelines", + "cicd", + "continuous", + "delivery" + ], + "engines": { + "node": ">= 10.13.0 <13 || >=13.7.0" + }, + "license": "Apache-2.0", + "stability": "experimental", + "maturity": "developer-preview", + "jsii": { + "outdir": "dist", + "targets": { + "java": { + "package": "software.amazon.awscdk.pipelines", + "maven": { + 
"groupId": "software.amazon.awscdk", + "artifactId": "cdk-pipelines" + } + }, + "dotnet": { + "namespace": "Amazon.CDK.Pipelines", + "packageId": "Amazon.CDK.Pipelines", + "signAssembly": true, + "assemblyOriginatorKeyFile": "../../key.snk", + "iconUrl": "https://raw.githubusercontent.com/aws/aws-cdk/master/logo/default-256-dark.png" + }, + "python": { + "distName": "aws-cdk.pipelines", + "module": "aws_cdk.pipelines" + } + } + }, + "awscdkio": { + "announce": false + }, + "awslint": { + "exclude": [ + "events-generic:@aws-cdk/pipelines.PublishAssetsAction", + "events-method-signature:@aws-cdk/pipelines.PublishAssetsAction.onStateChange", + "events-generic:@aws-cdk/pipelines.UpdatePipelineAction", + "events-method-signature:@aws-cdk/pipelines.UpdatePipelineAction.onStateChange" + ] + }, + "homepage": "https://github.com/aws/aws-cdk" +} diff --git a/packages/@aws-cdk/pipelines/test/builds.test.ts b/packages/@aws-cdk/pipelines/test/builds.test.ts new file mode 100644 index 0000000000000..95f1fddf11ad9 --- /dev/null +++ b/packages/@aws-cdk/pipelines/test/builds.test.ts @@ -0,0 +1,142 @@ +import { arrayWith, deepObjectLike } from '@aws-cdk/assert'; +import '@aws-cdk/assert/jest'; +import * as codepipeline from '@aws-cdk/aws-codepipeline'; +import { Stack } from '@aws-cdk/core'; +import * as cdkp from '../lib'; +import { encodedJson } from './testmatchers'; +import { PIPELINE_ENV, TestApp, TestGitHubNpmPipeline } from './testutil'; + +let app: TestApp; +let pipelineStack: Stack; +let sourceArtifact: codepipeline.Artifact; +let cloudAssemblyArtifact: codepipeline.Artifact; + +beforeEach(() => { + app = new TestApp({ outdir: 'testcdk.out' }); + pipelineStack = new Stack(app, 'PipelineStack', { env: PIPELINE_ENV }); + sourceArtifact = new codepipeline.Artifact(); + cloudAssemblyArtifact = new codepipeline.Artifact('CloudAsm'); +}); + +afterEach(() => { + app.cleanup(); +}); + +test.each([['npm'], ['yarn']])('%s build automatically determines artifact base-directory', 
(npmYarn) => { + // WHEN + new TestGitHubNpmPipeline(pipelineStack, 'Cdk', { + sourceArtifact, + cloudAssemblyArtifact, + synthAction: npmYarnBuild(npmYarn)({ sourceArtifact, cloudAssemblyArtifact }), + }); + + // THEN + expect(pipelineStack).toHaveResourceLike('AWS::CodeBuild::Project', { + Source: { + BuildSpec: encodedJson(deepObjectLike({ + artifacts: { + 'base-directory': 'testcdk.out', + }, + })), + }, + }); +}); + +test.each([['npm'], ['yarn']])('%s build respects subdirectory', (npmYarn) => { + // WHEN + new TestGitHubNpmPipeline(pipelineStack, 'Cdk', { + sourceArtifact, + cloudAssemblyArtifact, + synthAction: npmYarnBuild(npmYarn)({ + sourceArtifact, + cloudAssemblyArtifact, + subdirectory: 'subdir', + }), + }); + + // THEN + expect(pipelineStack).toHaveResourceLike('AWS::CodeBuild::Project', { + Source: { + BuildSpec: encodedJson(deepObjectLike({ + phases: { + pre_build: { + commands: arrayWith('cd subdir'), + }, + }, + artifacts: { + 'base-directory': 'subdir/testcdk.out', + }, + })), + }, + }); +}); + +test.each([['npm'], ['yarn']])('%s assumes no build step by default', (npmYarn) => { + // WHEN + new TestGitHubNpmPipeline(pipelineStack, 'Cdk', { + sourceArtifact, + cloudAssemblyArtifact, + synthAction: npmYarnBuild(npmYarn)({ sourceArtifact, cloudAssemblyArtifact }), + }); + + // THEN + expect(pipelineStack).toHaveResourceLike('AWS::CodeBuild::Project', { + Source: { + BuildSpec: encodedJson(deepObjectLike({ + phases: { + build: { + commands: ['npx cdk synth'], + }, + }, + })), + }, + }); +}); + +test('Standard (NPM) synth can output additional artifacts', () => { + // WHEN + sourceArtifact = new codepipeline.Artifact(); + cloudAssemblyArtifact = new codepipeline.Artifact('CloudAsm'); + + const addlArtifact = new codepipeline.Artifact('IntegTest'); + new TestGitHubNpmPipeline(pipelineStack, 'Cdk', { + sourceArtifact, + cloudAssemblyArtifact, + synthAction: cdkp.SimpleSynthAction.standardNpmSynth({ + sourceArtifact, + cloudAssemblyArtifact, + 
additionalArtifacts: [ + { + artifact: addlArtifact, + directory: 'test', + }, + ], + }), + }); + + // THEN + expect(pipelineStack).toHaveResourceLike('AWS::CodeBuild::Project', { + Source: { + BuildSpec: encodedJson(deepObjectLike({ + artifacts: { + 'secondary-artifacts': { + CloudAsm: { + 'base-directory': 'testcdk.out', + 'files': '**/*', + }, + IntegTest: { + 'base-directory': 'test', + 'files': '**/*', + }, + }, + }, + })), + }, + }); +}); + +function npmYarnBuild(npmYarn: string) { + if (npmYarn === 'npm') { return cdkp.SimpleSynthAction.standardNpmSynth; } + if (npmYarn === 'yarn') { return cdkp.SimpleSynthAction.standardYarnSynth; } + throw new Error(`Expecting npm|yarn: ${npmYarn}`); +} \ No newline at end of file diff --git a/packages/@aws-cdk/pipelines/test/cross-environment-infra.test.ts b/packages/@aws-cdk/pipelines/test/cross-environment-infra.test.ts new file mode 100644 index 0000000000000..b891574d6e3a6 --- /dev/null +++ b/packages/@aws-cdk/pipelines/test/cross-environment-infra.test.ts @@ -0,0 +1,76 @@ +import { arrayWith, objectLike } from '@aws-cdk/assert'; +import '@aws-cdk/assert/jest'; +import { Construct, Stack, Stage, StageProps } from '@aws-cdk/core'; +import * as cdkp from '../lib'; +import { stringLike } from './testmatchers'; +import { BucketStack, PIPELINE_ENV, TestApp, TestGitHubNpmPipeline } from './testutil'; + +let app: TestApp; +let pipelineStack: Stack; +let pipeline: cdkp.CdkPipeline; + +beforeEach(() => { + app = new TestApp(); + pipelineStack = new Stack(app, 'PipelineStack', { env: PIPELINE_ENV }); + pipeline = new TestGitHubNpmPipeline(pipelineStack, 'Cdk'); +}); + +afterEach(() => { + app.cleanup(); +}); + +test('in a cross-account/cross-region setup, artifact bucket can be read by deploy role', () => { + // WHEN + pipeline.addApplicationStage(new TestApplication(app, 'MyApp', { + env: { account: '321elsewhere', region: 'us-elsewhere' }, + })); + + // THEN + app.synth(); + const supportStack = 
app.node.findAll().filter(Stack.isStack).find(s => s.stackName === 'PipelineStack-support-us-elsewhere'); + expect(supportStack).not.toBeUndefined(); + + expect(supportStack).toHaveResourceLike('AWS::S3::BucketPolicy', { + PolicyDocument: { + Statement: arrayWith(objectLike({ + Action: arrayWith('s3:GetObject*', 's3:GetBucket*', 's3:List*'), + Principal: { + AWS: { + 'Fn::Sub': stringLike('*-deploy-role-*'), + }, + }, + })), + }, + }); +}); + +test('in a cross-account/same-region setup, artifact bucket can be read by deploy role', () => { + // WHEN + pipeline.addApplicationStage(new TestApplication(app, 'MyApp', { + env: { account: '321elsewhere', region: PIPELINE_ENV.region }, + })); + + // THEN + expect(pipelineStack).toHaveResourceLike('AWS::S3::BucketPolicy', { + PolicyDocument: { + Statement: arrayWith(objectLike({ + Action: ['s3:GetObject*', 's3:GetBucket*', 's3:List*'], + Principal: { + AWS: { + 'Fn::Sub': stringLike('*-deploy-role-*'), + }, + }, + })), + }, + }); +}); + +/** + * Our application + */ +class TestApplication extends Stage { + constructor(scope: Construct, id: string, props: StageProps) { + super(scope, id, props); + new BucketStack(this, 'Stack'); + } +} \ No newline at end of file diff --git a/packages/@aws-cdk/pipelines/test/integ.pipeline.expected.json b/packages/@aws-cdk/pipelines/test/integ.pipeline.expected.json new file mode 100644 index 0000000000000..1e212d090d82a --- /dev/null +++ b/packages/@aws-cdk/pipelines/test/integ.pipeline.expected.json @@ -0,0 +1,1316 @@ +{ + "Resources": { + "PipelineUpdatePipelineSelfMutationRole57E559E8": { + "Type": "AWS::IAM::Role", + "Properties": { + "AssumeRolePolicyDocument": { + "Statement": [ + { + "Action": "sts:AssumeRole", + "Effect": "Allow", + "Principal": { + "Service": "codebuild.amazonaws.com" + } + } + ], + "Version": "2012-10-17" + } + } + }, + "PipelineUpdatePipelineSelfMutationRoleDefaultPolicyA225DA4E": { + "Type": "AWS::IAM::Policy", + "Properties": { + "PolicyDocument": { + 
"Statement": [ + { + "Action": [ + "logs:CreateLogGroup", + "logs:CreateLogStream", + "logs:PutLogEvents" + ], + "Effect": "Allow", + "Resource": [ + { + "Fn::Join": [ + "", + [ + "arn:", + { + "Ref": "AWS::Partition" + }, + ":logs:test-region:12345678:log-group:/aws/codebuild/", + { + "Ref": "PipelineUpdatePipelineSelfMutationDAA41400" + } + ] + ] + }, + { + "Fn::Join": [ + "", + [ + "arn:", + { + "Ref": "AWS::Partition" + }, + ":logs:test-region:12345678:log-group:/aws/codebuild/", + { + "Ref": "PipelineUpdatePipelineSelfMutationDAA41400" + }, + ":*" + ] + ] + } + ] + }, + { + "Action": [ + "codebuild:CreateReportGroup", + "codebuild:CreateReport", + "codebuild:UpdateReport", + "codebuild:BatchPutTestCases" + ], + "Effect": "Allow", + "Resource": { + "Fn::Join": [ + "", + [ + "arn:", + { + "Ref": "AWS::Partition" + }, + ":codebuild:test-region:12345678:report-group/", + { + "Ref": "PipelineUpdatePipelineSelfMutationDAA41400" + }, + "-*" + ] + ] + } + }, + { + "Action": "sts:AssumeRole", + "Effect": "Allow", + "Resource": [ + "arn:*:iam::*:role/*-deploy-role-*", + "arn:*:iam::*:role/*-publishing-role-*" + ] + }, + { + "Action": "cloudformation:DescribeStacks", + "Effect": "Allow", + "Resource": "*" + }, + { + "Action": "s3:ListBucket", + "Effect": "Allow", + "Resource": "*" + }, + { + "Action": [ + "s3:GetObject*", + "s3:GetBucket*", + "s3:List*" + ], + "Effect": "Allow", + "Resource": [ + { + "Fn::GetAtt": [ + "PipelineArtifactsBucketAEA9A052", + "Arn" + ] + }, + { + "Fn::Join": [ + "", + [ + { + "Fn::GetAtt": [ + "PipelineArtifactsBucketAEA9A052", + "Arn" + ] + }, + "/*" + ] + ] + } + ] + }, + { + "Action": [ + "kms:Decrypt", + "kms:DescribeKey" + ], + "Effect": "Allow", + "Resource": { + "Fn::GetAtt": [ + "PipelineArtifactsBucketEncryptionKeyF5BF0670", + "Arn" + ] + } + }, + { + "Action": [ + "kms:Decrypt", + "kms:Encrypt", + "kms:ReEncrypt*", + "kms:GenerateDataKey*" + ], + "Effect": "Allow", + "Resource": { + "Fn::GetAtt": [ + 
"PipelineArtifactsBucketEncryptionKeyF5BF0670", + "Arn" + ] + } + } + ], + "Version": "2012-10-17" + }, + "PolicyName": "PipelineUpdatePipelineSelfMutationRoleDefaultPolicyA225DA4E", + "Roles": [ + { + "Ref": "PipelineUpdatePipelineSelfMutationRole57E559E8" + } + ] + } + }, + "PipelineUpdatePipelineSelfMutationDAA41400": { + "Type": "AWS::CodeBuild::Project", + "Properties": { + "Artifacts": { + "Type": "CODEPIPELINE" + }, + "Environment": { + "ComputeType": "BUILD_GENERAL1_SMALL", + "Image": "aws/codebuild/standard:1.0", + "PrivilegedMode": false, + "Type": "LINUX_CONTAINER" + }, + "ServiceRole": { + "Fn::GetAtt": [ + "PipelineUpdatePipelineSelfMutationRole57E559E8", + "Arn" + ] + }, + "Source": { + "BuildSpec": "{\n \"version\": \"0.2\",\n \"phases\": {\n \"install\": {\n \"commands\": \"npm install -g aws-cdk\"\n },\n \"build\": {\n \"commands\": [\n \"cdk -a . deploy PipelineStack --require-approval=never --verbose\"\n ]\n }\n }\n}", + "Type": "CODEPIPELINE" + }, + "EncryptionKey": { + "Fn::GetAtt": [ + "PipelineArtifactsBucketEncryptionKeyF5BF0670", + "Arn" + ] + } + } + }, + "PipelineArtifactsBucketEncryptionKeyF5BF0670": { + "Type": "AWS::KMS::Key", + "Properties": { + "KeyPolicy": { + "Statement": [ + { + "Action": [ + "kms:Create*", + "kms:Describe*", + "kms:Enable*", + "kms:List*", + "kms:Put*", + "kms:Update*", + "kms:Revoke*", + "kms:Disable*", + "kms:Get*", + "kms:Delete*", + "kms:ScheduleKeyDeletion", + "kms:CancelKeyDeletion", + "kms:GenerateDataKey", + "kms:TagResource", + "kms:UntagResource" + ], + "Effect": "Allow", + "Principal": { + "AWS": { + "Fn::Join": [ + "", + [ + "arn:", + { + "Ref": "AWS::Partition" + }, + ":iam::12345678:root" + ] + ] + } + }, + "Resource": "*" + }, + { + "Action": [ + "kms:Decrypt", + "kms:DescribeKey", + "kms:Encrypt", + "kms:ReEncrypt*", + "kms:GenerateDataKey*" + ], + "Effect": "Allow", + "Principal": { + "AWS": { + "Fn::GetAtt": [ + "PipelineRoleB27FAA37", + "Arn" + ] + } + }, + "Resource": "*" + }, + { + "Action": 
[ + "kms:Decrypt", + "kms:DescribeKey", + "kms:Encrypt", + "kms:ReEncrypt*", + "kms:GenerateDataKey*" + ], + "Effect": "Allow", + "Principal": { + "AWS": { + "Fn::GetAtt": [ + "PipelineBuildSynthCdkBuildProjectRole231EEA2A", + "Arn" + ] + } + }, + "Resource": "*" + }, + { + "Action": [ + "kms:Decrypt", + "kms:Encrypt", + "kms:ReEncrypt*", + "kms:GenerateDataKey*" + ], + "Effect": "Allow", + "Principal": { + "AWS": { + "Fn::GetAtt": [ + "PipelineBuildSynthCdkBuildProjectRole231EEA2A", + "Arn" + ] + } + }, + "Resource": "*" + }, + { + "Action": [ + "kms:Decrypt", + "kms:DescribeKey" + ], + "Effect": "Allow", + "Principal": { + "AWS": { + "Fn::GetAtt": [ + "PipelineUpdatePipelineSelfMutationRole57E559E8", + "Arn" + ] + } + }, + "Resource": "*" + }, + { + "Action": [ + "kms:Decrypt", + "kms:Encrypt", + "kms:ReEncrypt*", + "kms:GenerateDataKey*" + ], + "Effect": "Allow", + "Principal": { + "AWS": { + "Fn::GetAtt": [ + "PipelineUpdatePipelineSelfMutationRole57E559E8", + "Arn" + ] + } + }, + "Resource": "*" + }, + { + "Action": [ + "kms:Decrypt", + "kms:DescribeKey" + ], + "Effect": "Allow", + "Principal": { + "AWS": { + "Fn::GetAtt": [ + "PipelinePreProdUseSourceProjectRole69B20A71", + "Arn" + ] + } + }, + "Resource": "*" + }, + { + "Action": [ + "kms:Decrypt", + "kms:Encrypt", + "kms:ReEncrypt*", + "kms:GenerateDataKey*" + ], + "Effect": "Allow", + "Principal": { + "AWS": { + "Fn::GetAtt": [ + "PipelinePreProdUseSourceProjectRole69B20A71", + "Arn" + ] + } + }, + "Resource": "*" + }, + { + "Action": [ + "kms:Decrypt", + "kms:DescribeKey" + ], + "Effect": "Allow", + "Principal": { + "AWS": { + "Fn::Sub": "arn:${AWS::Partition}:iam::${AWS::AccountId}:role/cdk-hnb659fds-deploy-role-${AWS::AccountId}-${AWS::Region}" + } + }, + "Resource": "*" + } + ], + "Version": "2012-10-17" + } + }, + "UpdateReplacePolicy": "Delete", + "DeletionPolicy": "Delete" + }, + "PipelineArtifactsBucketAEA9A052": { + "Type": "AWS::S3::Bucket", + "Properties": { + "BucketEncryption": { + 
"ServerSideEncryptionConfiguration": [ + { + "ServerSideEncryptionByDefault": { + "KMSMasterKeyID": { + "Fn::GetAtt": [ + "PipelineArtifactsBucketEncryptionKeyF5BF0670", + "Arn" + ] + }, + "SSEAlgorithm": "aws:kms" + } + } + ] + }, + "PublicAccessBlockConfiguration": { + "BlockPublicAcls": true, + "BlockPublicPolicy": true, + "IgnorePublicAcls": true, + "RestrictPublicBuckets": true + } + }, + "UpdateReplacePolicy": "Retain", + "DeletionPolicy": "Retain" + }, + "PipelineArtifactsBucketEncryptionKeyAlias94A07392": { + "Type": "AWS::KMS::Alias", + "Properties": { + "AliasName": "alias/codepipeline-pipelinestackpipelinee95eedaa", + "TargetKeyId": { + "Fn::GetAtt": [ + "PipelineArtifactsBucketEncryptionKeyF5BF0670", + "Arn" + ] + } + }, + "UpdateReplacePolicy": "Delete", + "DeletionPolicy": "Delete" + }, + "PipelineRoleB27FAA37": { + "Type": "AWS::IAM::Role", + "Properties": { + "AssumeRolePolicyDocument": { + "Statement": [ + { + "Action": "sts:AssumeRole", + "Effect": "Allow", + "Principal": { + "Service": "codepipeline.amazonaws.com" + } + } + ], + "Version": "2012-10-17" + } + } + }, + "PipelineRoleDefaultPolicy7BDC1ABB": { + "Type": "AWS::IAM::Policy", + "Properties": { + "PolicyDocument": { + "Statement": [ + { + "Action": [ + "s3:GetObject*", + "s3:GetBucket*", + "s3:List*", + "s3:DeleteObject*", + "s3:PutObject*", + "s3:Abort*" + ], + "Effect": "Allow", + "Resource": [ + { + "Fn::GetAtt": [ + "PipelineArtifactsBucketAEA9A052", + "Arn" + ] + }, + { + "Fn::Join": [ + "", + [ + { + "Fn::GetAtt": [ + "PipelineArtifactsBucketAEA9A052", + "Arn" + ] + }, + "/*" + ] + ] + } + ] + }, + { + "Action": [ + "kms:Decrypt", + "kms:DescribeKey", + "kms:Encrypt", + "kms:ReEncrypt*", + "kms:GenerateDataKey*" + ], + "Effect": "Allow", + "Resource": { + "Fn::GetAtt": [ + "PipelineArtifactsBucketEncryptionKeyF5BF0670", + "Arn" + ] + } + }, + { + "Action": "sts:AssumeRole", + "Effect": "Allow", + "Resource": { + "Fn::GetAtt": [ + "PipelineBuildSynthCodePipelineActionRole4E7A6C97", + 
"Arn" + ] + } + }, + { + "Action": "sts:AssumeRole", + "Effect": "Allow", + "Resource": { + "Fn::GetAtt": [ + "PipelineUpdatePipelineSelfMutateCodePipelineActionRoleD6D4E5CF", + "Arn" + ] + } + }, + { + "Action": "sts:AssumeRole", + "Effect": "Allow", + "Resource": { + "Fn::GetAtt": [ + "PipelinePreProdUseSourceCodePipelineActionRoleA2043BDA", + "Arn" + ] + } + }, + { + "Action": "sts:AssumeRole", + "Effect": "Allow", + "Resource": { + "Fn::Sub": "arn:${AWS::Partition}:iam::${AWS::AccountId}:role/cdk-hnb659fds-deploy-role-${AWS::AccountId}-${AWS::Region}" + } + } + ], + "Version": "2012-10-17" + }, + "PolicyName": "PipelineRoleDefaultPolicy7BDC1ABB", + "Roles": [ + { + "Ref": "PipelineRoleB27FAA37" + } + ] + } + }, + "Pipeline9850B417": { + "Type": "AWS::CodePipeline::Pipeline", + "Properties": { + "RoleArn": { + "Fn::GetAtt": [ + "PipelineRoleB27FAA37", + "Arn" + ] + }, + "Stages": [ + { + "Actions": [ + { + "ActionTypeId": { + "Category": "Source", + "Owner": "ThirdParty", + "Provider": "GitHub", + "Version": "1" + }, + "Configuration": { + "Owner": "OWNER", + "Repo": "REPO", + "Branch": "master", + "OAuthToken": "not-a-secret", + "PollForSourceChanges": true + }, + "Name": "GitHub", + "OutputArtifacts": [ + { + "Name": "Artifact_Source_GitHub" + } + ], + "RunOrder": 1 + } + ], + "Name": "Source" + }, + { + "Actions": [ + { + "ActionTypeId": { + "Category": "Build", + "Owner": "AWS", + "Provider": "CodeBuild", + "Version": "1" + }, + "Configuration": { + "ProjectName": { + "Ref": "PipelineBuildSynthCdkBuildProject6BEFA8E6" + } + }, + "InputArtifacts": [ + { + "Name": "Artifact_Source_GitHub" + } + ], + "Name": "Synth", + "OutputArtifacts": [ + { + "Name": "CloudAsm" + }, + { + "Name": "IntegTests" + } + ], + "RoleArn": { + "Fn::GetAtt": [ + "PipelineBuildSynthCodePipelineActionRole4E7A6C97", + "Arn" + ] + }, + "RunOrder": 1 + } + ], + "Name": "Build" + }, + { + "Actions": [ + { + "ActionTypeId": { + "Category": "Build", + "Owner": "AWS", + "Provider": 
"CodeBuild", + "Version": "1" + }, + "Configuration": { + "ProjectName": { + "Ref": "PipelineUpdatePipelineSelfMutationDAA41400" + } + }, + "InputArtifacts": [ + { + "Name": "CloudAsm" + } + ], + "Name": "SelfMutate", + "RoleArn": { + "Fn::GetAtt": [ + "PipelineUpdatePipelineSelfMutateCodePipelineActionRoleD6D4E5CF", + "Arn" + ] + }, + "RunOrder": 1 + } + ], + "Name": "UpdatePipeline" + }, + { + "Actions": [ + { + "ActionTypeId": { + "Category": "Build", + "Owner": "AWS", + "Provider": "CodeBuild", + "Version": "1" + }, + "Configuration": { + "ProjectName": { + "Ref": "PipelinePreProdUseSourceProject2E711EB4" + } + }, + "InputArtifacts": [ + { + "Name": "Artifact_Source_GitHub" + } + ], + "Name": "UseSource", + "RoleArn": { + "Fn::GetAtt": [ + "PipelinePreProdUseSourceCodePipelineActionRoleA2043BDA", + "Arn" + ] + }, + "RunOrder": 100 + }, + { + "ActionTypeId": { + "Category": "Deploy", + "Owner": "AWS", + "Provider": "CloudFormation", + "Version": "1" + }, + "Configuration": { + "StackName": "PreProd-Stack", + "Capabilities": "CAPABILITY_NAMED_IAM,CAPABILITY_AUTO_EXPAND", + "RoleArn": { + "Fn::Sub": "arn:${AWS::Partition}:iam::${AWS::AccountId}:role/cdk-hnb659fds-cfn-exec-role-${AWS::AccountId}-${AWS::Region}" + }, + "ActionMode": "CHANGE_SET_REPLACE", + "ChangeSetName": "PipelineChange", + "TemplatePath": "CloudAsm::assembly-PipelineStack-PreProd/PipelineStackPreProdStack65A0AD1F.template.json" + }, + "InputArtifacts": [ + { + "Name": "CloudAsm" + } + ], + "Name": "Stack.Prepare", + "RoleArn": { + "Fn::Sub": "arn:${AWS::Partition}:iam::${AWS::AccountId}:role/cdk-hnb659fds-deploy-role-${AWS::AccountId}-${AWS::Region}" + }, + "RunOrder": 1 + }, + { + "ActionTypeId": { + "Category": "Deploy", + "Owner": "AWS", + "Provider": "CloudFormation", + "Version": "1" + }, + "Configuration": { + "StackName": "PreProd-Stack", + "ActionMode": "CHANGE_SET_EXECUTE", + "ChangeSetName": "PipelineChange" + }, + "Name": "Stack.Deploy", + "RoleArn": { + "Fn::Sub": 
"arn:${AWS::Partition}:iam::${AWS::AccountId}:role/cdk-hnb659fds-deploy-role-${AWS::AccountId}-${AWS::Region}" + }, + "RunOrder": 2 + } + ], + "Name": "PreProd" + } + ], + "ArtifactStore": { + "EncryptionKey": { + "Id": { + "Fn::GetAtt": [ + "PipelineArtifactsBucketEncryptionKeyF5BF0670", + "Arn" + ] + }, + "Type": "KMS" + }, + "Location": { + "Ref": "PipelineArtifactsBucketAEA9A052" + }, + "Type": "S3" + }, + "RestartExecutionOnUpdate": true + }, + "DependsOn": [ + "PipelineRoleDefaultPolicy7BDC1ABB", + "PipelineRoleB27FAA37" + ] + }, + "PipelineBuildSynthCodePipelineActionRole4E7A6C97": { + "Type": "AWS::IAM::Role", + "Properties": { + "AssumeRolePolicyDocument": { + "Statement": [ + { + "Action": "sts:AssumeRole", + "Effect": "Allow", + "Principal": { + "AWS": { + "Fn::Join": [ + "", + [ + "arn:", + { + "Ref": "AWS::Partition" + }, + ":iam::12345678:root" + ] + ] + } + } + } + ], + "Version": "2012-10-17" + } + } + }, + "PipelineBuildSynthCodePipelineActionRoleDefaultPolicy92C90290": { + "Type": "AWS::IAM::Policy", + "Properties": { + "PolicyDocument": { + "Statement": [ + { + "Action": [ + "codebuild:BatchGetBuilds", + "codebuild:StartBuild", + "codebuild:StopBuild" + ], + "Effect": "Allow", + "Resource": { + "Fn::GetAtt": [ + "PipelineBuildSynthCdkBuildProject6BEFA8E6", + "Arn" + ] + } + } + ], + "Version": "2012-10-17" + }, + "PolicyName": "PipelineBuildSynthCodePipelineActionRoleDefaultPolicy92C90290", + "Roles": [ + { + "Ref": "PipelineBuildSynthCodePipelineActionRole4E7A6C97" + } + ] + } + }, + "PipelineBuildSynthCdkBuildProjectRole231EEA2A": { + "Type": "AWS::IAM::Role", + "Properties": { + "AssumeRolePolicyDocument": { + "Statement": [ + { + "Action": "sts:AssumeRole", + "Effect": "Allow", + "Principal": { + "Service": "codebuild.amazonaws.com" + } + } + ], + "Version": "2012-10-17" + } + } + }, + "PipelineBuildSynthCdkBuildProjectRoleDefaultPolicyFB6C941C": { + "Type": "AWS::IAM::Policy", + "Properties": { + "PolicyDocument": { + "Statement": [ + { + 
"Action": [ + "logs:CreateLogGroup", + "logs:CreateLogStream", + "logs:PutLogEvents" + ], + "Effect": "Allow", + "Resource": [ + { + "Fn::Join": [ + "", + [ + "arn:", + { + "Ref": "AWS::Partition" + }, + ":logs:test-region:12345678:log-group:/aws/codebuild/", + { + "Ref": "PipelineBuildSynthCdkBuildProject6BEFA8E6" + } + ] + ] + }, + { + "Fn::Join": [ + "", + [ + "arn:", + { + "Ref": "AWS::Partition" + }, + ":logs:test-region:12345678:log-group:/aws/codebuild/", + { + "Ref": "PipelineBuildSynthCdkBuildProject6BEFA8E6" + }, + ":*" + ] + ] + } + ] + }, + { + "Action": [ + "codebuild:CreateReportGroup", + "codebuild:CreateReport", + "codebuild:UpdateReport", + "codebuild:BatchPutTestCases" + ], + "Effect": "Allow", + "Resource": { + "Fn::Join": [ + "", + [ + "arn:", + { + "Ref": "AWS::Partition" + }, + ":codebuild:test-region:12345678:report-group/", + { + "Ref": "PipelineBuildSynthCdkBuildProject6BEFA8E6" + }, + "-*" + ] + ] + } + }, + { + "Action": [ + "s3:GetObject*", + "s3:GetBucket*", + "s3:List*", + "s3:DeleteObject*", + "s3:PutObject*", + "s3:Abort*" + ], + "Effect": "Allow", + "Resource": [ + { + "Fn::GetAtt": [ + "PipelineArtifactsBucketAEA9A052", + "Arn" + ] + }, + { + "Fn::Join": [ + "", + [ + { + "Fn::GetAtt": [ + "PipelineArtifactsBucketAEA9A052", + "Arn" + ] + }, + "/*" + ] + ] + } + ] + }, + { + "Action": [ + "kms:Decrypt", + "kms:DescribeKey", + "kms:Encrypt", + "kms:ReEncrypt*", + "kms:GenerateDataKey*" + ], + "Effect": "Allow", + "Resource": { + "Fn::GetAtt": [ + "PipelineArtifactsBucketEncryptionKeyF5BF0670", + "Arn" + ] + } + }, + { + "Action": [ + "kms:Decrypt", + "kms:Encrypt", + "kms:ReEncrypt*", + "kms:GenerateDataKey*" + ], + "Effect": "Allow", + "Resource": { + "Fn::GetAtt": [ + "PipelineArtifactsBucketEncryptionKeyF5BF0670", + "Arn" + ] + } + } + ], + "Version": "2012-10-17" + }, + "PolicyName": "PipelineBuildSynthCdkBuildProjectRoleDefaultPolicyFB6C941C", + "Roles": [ + { + "Ref": "PipelineBuildSynthCdkBuildProjectRole231EEA2A" + } + ] + } 
+ }, + "PipelineBuildSynthCdkBuildProject6BEFA8E6": { + "Type": "AWS::CodeBuild::Project", + "Properties": { + "Artifacts": { + "Type": "CODEPIPELINE" + }, + "Environment": { + "ComputeType": "BUILD_GENERAL1_SMALL", + "Image": "aws/codebuild/standard:1.0", + "PrivilegedMode": false, + "Type": "LINUX_CONTAINER" + }, + "ServiceRole": { + "Fn::GetAtt": [ + "PipelineBuildSynthCdkBuildProjectRole231EEA2A", + "Arn" + ] + }, + "Source": { + "BuildSpec": "{\n \"version\": \"0.2\",\n \"phases\": {\n \"pre_build\": {\n \"commands\": [\n \"npm ci\"\n ]\n },\n \"build\": {\n \"commands\": [\n \"npx cdk synth\"\n ]\n }\n },\n \"artifacts\": {\n \"secondary-artifacts\": {\n \"CloudAsm\": {\n \"base-directory\": \"cdk.out\",\n \"files\": \"**/*\"\n },\n \"IntegTests\": {\n \"base-directory\": \"test\",\n \"files\": \"**/*\"\n }\n }\n }\n}", + "Type": "CODEPIPELINE" + }, + "EncryptionKey": { + "Fn::GetAtt": [ + "PipelineArtifactsBucketEncryptionKeyF5BF0670", + "Arn" + ] + }, + "Name": "MyServicePipeline-synth" + } + }, + "PipelineUpdatePipelineSelfMutateCodePipelineActionRoleD6D4E5CF": { + "Type": "AWS::IAM::Role", + "Properties": { + "AssumeRolePolicyDocument": { + "Statement": [ + { + "Action": "sts:AssumeRole", + "Effect": "Allow", + "Principal": { + "AWS": { + "Fn::Join": [ + "", + [ + "arn:", + { + "Ref": "AWS::Partition" + }, + ":iam::12345678:root" + ] + ] + } + } + } + ], + "Version": "2012-10-17" + } + } + }, + "PipelineUpdatePipelineSelfMutateCodePipelineActionRoleDefaultPolicyE626265B": { + "Type": "AWS::IAM::Policy", + "Properties": { + "PolicyDocument": { + "Statement": [ + { + "Action": [ + "codebuild:BatchGetBuilds", + "codebuild:StartBuild", + "codebuild:StopBuild" + ], + "Effect": "Allow", + "Resource": { + "Fn::GetAtt": [ + "PipelineUpdatePipelineSelfMutationDAA41400", + "Arn" + ] + } + } + ], + "Version": "2012-10-17" + }, + "PolicyName": "PipelineUpdatePipelineSelfMutateCodePipelineActionRoleDefaultPolicyE626265B", + "Roles": [ + { + "Ref": 
"PipelineUpdatePipelineSelfMutateCodePipelineActionRoleD6D4E5CF" + } + ] + } + }, + "PipelinePreProdUseSourceCodePipelineActionRoleA2043BDA": { + "Type": "AWS::IAM::Role", + "Properties": { + "AssumeRolePolicyDocument": { + "Statement": [ + { + "Action": "sts:AssumeRole", + "Effect": "Allow", + "Principal": { + "AWS": { + "Fn::Join": [ + "", + [ + "arn:", + { + "Ref": "AWS::Partition" + }, + ":iam::12345678:root" + ] + ] + } + } + } + ], + "Version": "2012-10-17" + } + } + }, + "PipelinePreProdUseSourceCodePipelineActionRoleDefaultPolicy9BE325AD": { + "Type": "AWS::IAM::Policy", + "Properties": { + "PolicyDocument": { + "Statement": [ + { + "Action": [ + "codebuild:BatchGetBuilds", + "codebuild:StartBuild", + "codebuild:StopBuild" + ], + "Effect": "Allow", + "Resource": { + "Fn::GetAtt": [ + "PipelinePreProdUseSourceProject2E711EB4", + "Arn" + ] + } + } + ], + "Version": "2012-10-17" + }, + "PolicyName": "PipelinePreProdUseSourceCodePipelineActionRoleDefaultPolicy9BE325AD", + "Roles": [ + { + "Ref": "PipelinePreProdUseSourceCodePipelineActionRoleA2043BDA" + } + ] + } + }, + "PipelinePreProdUseSourceProjectRole69B20A71": { + "Type": "AWS::IAM::Role", + "Properties": { + "AssumeRolePolicyDocument": { + "Statement": [ + { + "Action": "sts:AssumeRole", + "Effect": "Allow", + "Principal": { + "Service": "codebuild.amazonaws.com" + } + } + ], + "Version": "2012-10-17" + } + } + }, + "PipelinePreProdUseSourceProjectRoleDefaultPolicy50F68DF3": { + "Type": "AWS::IAM::Policy", + "Properties": { + "PolicyDocument": { + "Statement": [ + { + "Action": [ + "logs:CreateLogGroup", + "logs:CreateLogStream", + "logs:PutLogEvents" + ], + "Effect": "Allow", + "Resource": [ + { + "Fn::Join": [ + "", + [ + "arn:", + { + "Ref": "AWS::Partition" + }, + ":logs:test-region:12345678:log-group:/aws/codebuild/", + { + "Ref": "PipelinePreProdUseSourceProject2E711EB4" + } + ] + ] + }, + { + "Fn::Join": [ + "", + [ + "arn:", + { + "Ref": "AWS::Partition" + }, + 
":logs:test-region:12345678:log-group:/aws/codebuild/", + { + "Ref": "PipelinePreProdUseSourceProject2E711EB4" + }, + ":*" + ] + ] + } + ] + }, + { + "Action": [ + "codebuild:CreateReportGroup", + "codebuild:CreateReport", + "codebuild:UpdateReport", + "codebuild:BatchPutTestCases" + ], + "Effect": "Allow", + "Resource": { + "Fn::Join": [ + "", + [ + "arn:", + { + "Ref": "AWS::Partition" + }, + ":codebuild:test-region:12345678:report-group/", + { + "Ref": "PipelinePreProdUseSourceProject2E711EB4" + }, + "-*" + ] + ] + } + }, + { + "Action": [ + "s3:GetObject*", + "s3:GetBucket*", + "s3:List*" + ], + "Effect": "Allow", + "Resource": [ + { + "Fn::GetAtt": [ + "PipelineArtifactsBucketAEA9A052", + "Arn" + ] + }, + { + "Fn::Join": [ + "", + [ + { + "Fn::GetAtt": [ + "PipelineArtifactsBucketAEA9A052", + "Arn" + ] + }, + "/*" + ] + ] + } + ] + }, + { + "Action": [ + "kms:Decrypt", + "kms:DescribeKey" + ], + "Effect": "Allow", + "Resource": { + "Fn::GetAtt": [ + "PipelineArtifactsBucketEncryptionKeyF5BF0670", + "Arn" + ] + } + }, + { + "Action": [ + "kms:Decrypt", + "kms:Encrypt", + "kms:ReEncrypt*", + "kms:GenerateDataKey*" + ], + "Effect": "Allow", + "Resource": { + "Fn::GetAtt": [ + "PipelineArtifactsBucketEncryptionKeyF5BF0670", + "Arn" + ] + } + } + ], + "Version": "2012-10-17" + }, + "PolicyName": "PipelinePreProdUseSourceProjectRoleDefaultPolicy50F68DF3", + "Roles": [ + { + "Ref": "PipelinePreProdUseSourceProjectRole69B20A71" + } + ] + } + }, + "PipelinePreProdUseSourceProject2E711EB4": { + "Type": "AWS::CodeBuild::Project", + "Properties": { + "Artifacts": { + "Type": "CODEPIPELINE" + }, + "Environment": { + "ComputeType": "BUILD_GENERAL1_SMALL", + "Image": "aws/codebuild/standard:1.0", + "PrivilegedMode": false, + "Type": "LINUX_CONTAINER" + }, + "ServiceRole": { + "Fn::GetAtt": [ + "PipelinePreProdUseSourceProjectRole69B20A71", + "Arn" + ] + }, + "Source": { + "BuildSpec": "{\n \"version\": \"0.2\",\n \"phases\": {\n \"build\": {\n \"commands\": [\n \"set 
-eu\",\n \"cat README.md\"\n ]\n }\n }\n}", + "Type": "CODEPIPELINE" + }, + "EncryptionKey": { + "Fn::GetAtt": [ + "PipelineArtifactsBucketEncryptionKeyF5BF0670", + "Arn" + ] + } + } + } + } +} \ No newline at end of file diff --git a/packages/@aws-cdk/pipelines/test/integ.pipeline.ts b/packages/@aws-cdk/pipelines/test/integ.pipeline.ts new file mode 100644 index 0000000000000..f0a4da9dde073 --- /dev/null +++ b/packages/@aws-cdk/pipelines/test/integ.pipeline.ts @@ -0,0 +1,80 @@ +/// !cdk-integ PipelineStack +import * as codepipeline from '@aws-cdk/aws-codepipeline'; +import * as codepipeline_actions from '@aws-cdk/aws-codepipeline-actions'; +import { App, CfnResource, Construct, SecretValue, Stack, StackProps, Stage, StageProps } from '@aws-cdk/core'; +import * as cdkp from '../lib'; + +class MyStage extends Stage { + constructor(scope: Construct, id: string, props?: StageProps) { + super(scope, id, props); + + const stack = new Stack(this, 'Stack'); + new CfnResource(stack, 'Resource', { + type: 'AWS::Test::SomeResource', + }); + } +} + +/** + * The stack that defines the application pipeline + */ +class CdkpipelinesDemoPipelineStack extends Stack { + constructor(scope: Construct, id: string, props?: StackProps) { + super(scope, id, props); + + const sourceArtifact = new codepipeline.Artifact(); + const cloudAssemblyArtifact = new codepipeline.Artifact('CloudAsm'); + const integTestArtifact = new codepipeline.Artifact('IntegTests'); + + const pipeline = new cdkp.CdkPipeline(this, 'Pipeline', { + cloudAssemblyArtifact, + + // Where the source can be found + sourceAction: new codepipeline_actions.GitHubSourceAction({ + actionName: 'GitHub', + output: sourceArtifact, + oauthToken: SecretValue.plainText('not-a-secret'), + owner: 'OWNER', + repo: 'REPO', + trigger: codepipeline_actions.GitHubTrigger.POLL, + }), + + // How it will be built + synthAction: cdkp.SimpleSynthAction.standardNpmSynth({ + sourceArtifact, + cloudAssemblyArtifact, + projectName: 
'MyServicePipeline-synth', + additionalArtifacts: [ + { + directory: 'test', + artifact: integTestArtifact, + }, + ], + }), + }); + + // This is where we add the application stages + // ... + const stage = pipeline.addApplicationStage(new MyStage(this, 'PreProd')); + stage.addActions( + new cdkp.ShellScriptAction({ + actionName: 'UseSource', + commands: [ + // Comes from source + 'cat README.md', + ], + additionalArtifacts: [sourceArtifact], + }), + ); + } +} + +const app = new App({ + context: { + '@aws-cdk/core:newStyleStackSynthesis': 'true', + }, +}); +new CdkpipelinesDemoPipelineStack(app, 'PipelineStack', { + env: { account: process.env.CDK_DEFAULT_ACCOUNT, region: process.env.CDK_DEFAULT_REGION }, +}); +app.synth(); diff --git a/packages/@aws-cdk/pipelines/test/pipeline-assets.test.ts b/packages/@aws-cdk/pipelines/test/pipeline-assets.test.ts new file mode 100644 index 0000000000000..c1fb0b79b13f9 --- /dev/null +++ b/packages/@aws-cdk/pipelines/test/pipeline-assets.test.ts @@ -0,0 +1,216 @@ +import { arrayWith, deepObjectLike, objectLike } from '@aws-cdk/assert'; +import '@aws-cdk/assert/jest'; +import * as ecr_assets from '@aws-cdk/aws-ecr-assets'; +import * as s3_assets from '@aws-cdk/aws-s3-assets'; +import { Construct, Stack, Stage, StageProps } from '@aws-cdk/core'; +import * as path from 'path'; +import * as cdkp from '../lib'; +import { encodedJson, notMatching, stringLike } from './testmatchers'; +import { BucketStack, PIPELINE_ENV, TestApp, TestGitHubNpmPipeline } from './testutil'; + +const FILE_ASSET_SOURCE_HASH = '8289faf53c7da377bb2b90615999171adef5e1d8f6b88810e5fef75e6ca09ba5'; + +let app: TestApp; +let pipelineStack: Stack; +let pipeline: cdkp.CdkPipeline; + +beforeEach(() => { + app = new TestApp(); + pipelineStack = new Stack(app, 'PipelineStack', { env: PIPELINE_ENV }); + pipeline = new TestGitHubNpmPipeline(pipelineStack, 'Cdk'); +}); + +afterEach(() => { + app.cleanup(); +}); + +test('no assets stage if the application has no assets', () 
=> { + // WHEN + pipeline.addApplicationStage(new PlainStackApp(app, 'App')); + + // THEN + expect(pipelineStack).toHaveResourceLike('AWS::CodePipeline::Pipeline', { + Stages: notMatching(arrayWith(objectLike({ + Name: 'Assets', + }))), + }); +}); + +test('command line properly locates assets in subassembly', () => { + // WHEN + pipeline.addApplicationStage(new FileAssetApp(app, 'FileAssetApp')); + + // THEN + expect(pipelineStack).toHaveResourceLike('AWS::CodeBuild::Project', { + Source: { + BuildSpec: encodedJson(deepObjectLike({ + phases: { + build: { + // tslint:disable-next-line: max-line-length + commands: arrayWith(`cdk-assets --path "assembly-FileAssetApp/FileAssetAppStackEADD68C5.assets.json" --verbose publish "${FILE_ASSET_SOURCE_HASH}:current_account-current_region"`), + }, + }, + })), + }, + }); +}); + +test('multiple assets are published in parallel', () => { + // WHEN + pipeline.addApplicationStage(new TwoFileAssetsApp(app, 'FileAssetApp')); + + // THEN + expect(pipelineStack).toHaveResourceLike('AWS::CodePipeline::Pipeline', { + Stages: arrayWith({ + Name: 'Assets', + Actions: [ + objectLike({ RunOrder: 1 }), + objectLike({ RunOrder: 1 }), + ], + }), + }); +}); + +test('assets are also published when using the lower-level addStackArtifactDeployment', () => { + // GIVEN + const asm = new FileAssetApp(app, 'FileAssetApp').synth(); + + // WHEN + pipeline.addStage('SomeStage').addStackArtifactDeployment(asm.getStackByName('FileAssetApp-Stack')); + + // THEN + expect(pipelineStack).toHaveResourceLike('AWS::CodePipeline::Pipeline', { + Stages: arrayWith({ + Name: 'Assets', + Actions: [ + objectLike({ + Name: FILE_ASSET_SOURCE_HASH, + RunOrder: 1, + }), + ], + }), + }); +}); + +test('file image asset publishers do not use privilegedmode, have right AssumeRole', () => { + // WHEN + pipeline.addApplicationStage(new FileAssetApp(app, 'FileAssetApp')); + + // THEN + expect(pipelineStack).toHaveResourceLike('AWS::CodeBuild::Project', { + Source: { + BuildSpec: 
encodedJson(deepObjectLike({ + phases: { + build: { + commands: arrayWith(stringLike('cdk-assets *')), + }, + }, + })), + }, + Environment: objectLike({ + PrivilegedMode: false, + }), + }); + + expect(pipelineStack).toHaveResourceLike('AWS::IAM::Policy', { + PolicyDocument: { + Statement: arrayWith({ + Action: 'sts:AssumeRole', + Effect: 'Allow', + Resource: 'arn:*:iam::*:role/*-file-publishing-role-*', + }), + }, + }); +}); + +test('docker image asset publishers use privilegedmode, have right AssumeRole', () => { + // WHEN + pipeline.addApplicationStage(new DockerAssetApp(app, 'DockerAssetApp')); + + // THEN + expect(pipelineStack).toHaveResourceLike('AWS::CodeBuild::Project', { + Source: { + BuildSpec: encodedJson(deepObjectLike({ + phases: { + build: { + commands: arrayWith(stringLike('cdk-assets *')), + }, + }, + })), + }, + Environment: objectLike({ + PrivilegedMode: true, + }), + }); + expect(pipelineStack).toHaveResourceLike('AWS::IAM::Policy', { + PolicyDocument: { + Statement: arrayWith({ + Action: 'sts:AssumeRole', + Effect: 'Allow', + Resource: 'arn:*:iam::*:role/*-image-publishing-role-*', + }), + }, + }); +}); + +test('can control fix/CLI version used in pipeline selfupdate', () => { + // WHEN + const stack2 = new Stack(app, 'Stack2', { env: PIPELINE_ENV }); + const pipeline2 = new TestGitHubNpmPipeline(stack2, 'Cdk2', { + cdkCliVersion: '1.2.3', + }); + pipeline2.addApplicationStage(new FileAssetApp(stack2, 'FileAssetApp')); + + // THEN + expect(stack2).toHaveResourceLike('AWS::CodeBuild::Project', { + Source: { + BuildSpec: encodedJson(deepObjectLike({ + phases: { + install: { + commands: 'npm install -g cdk-assets@1.2.3', + }, + }, + })), + }, + }); +}); + +class PlainStackApp extends Stage { + constructor(scope: Construct, id: string, props?: StageProps) { + super(scope, id, props); + new BucketStack(this, 'Stack'); + } +} + +class FileAssetApp extends Stage { + constructor(scope: Construct, id: string, props?: StageProps) { + super(scope, id, 
props); + const stack = new Stack(this, 'Stack'); + new s3_assets.Asset(stack, 'Asset', { + path: path.join(__dirname, 'test-file-asset.txt'), + }); + } +} + +class TwoFileAssetsApp extends Stage { + constructor(scope: Construct, id: string, props?: StageProps) { + super(scope, id, props); + const stack = new Stack(this, 'Stack'); + new s3_assets.Asset(stack, 'Asset1', { + path: path.join(__dirname, 'test-file-asset.txt'), + }); + new s3_assets.Asset(stack, 'Asset2', { + path: path.join(__dirname, 'test-file-asset-two.txt'), + }); + } +} + +class DockerAssetApp extends Stage { + constructor(scope: Construct, id: string, props?: StageProps) { + super(scope, id, props); + const stack = new Stack(this, 'Stack'); + new ecr_assets.DockerImageAsset(stack, 'Asset', { + directory: path.join(__dirname, 'test-docker-asset'), + }); + } +} \ No newline at end of file diff --git a/packages/@aws-cdk/pipelines/test/pipeline.test.ts b/packages/@aws-cdk/pipelines/test/pipeline.test.ts new file mode 100644 index 0000000000000..2196726c91180 --- /dev/null +++ b/packages/@aws-cdk/pipelines/test/pipeline.test.ts @@ -0,0 +1,286 @@ +import { arrayWith, deepObjectLike, objectLike } from '@aws-cdk/assert'; +import '@aws-cdk/assert/jest'; +import { Construct, Stack, Stage, StageProps } from '@aws-cdk/core'; +import * as cdkp from '../lib'; +import { anything, encodedJson, stringLike } from './testmatchers'; +import { BucketStack, PIPELINE_ENV, stackTemplate, TestApp, TestGitHubNpmPipeline } from './testutil'; + +let app: TestApp; +let pipelineStack: Stack; +let pipeline: cdkp.CdkPipeline; + +beforeEach(() => { + app = new TestApp(); + pipelineStack = new Stack(app, 'PipelineStack', { env: PIPELINE_ENV }); + pipeline = new TestGitHubNpmPipeline(pipelineStack, 'Cdk'); +}); + +afterEach(() => { + app.cleanup(); +}); + +test('references stack template in subassembly', () => { + // WHEN + pipeline.addApplicationStage(new OneStackApp(app, 'App')); + + // THEN + 
expect(pipelineStack).toHaveResourceLike('AWS::CodePipeline::Pipeline', { + Stages: arrayWith({ + Name: 'App', + Actions: arrayWith( + objectLike({ + Name: 'Stack.Prepare', + InputArtifacts: [objectLike({})], + Configuration: objectLike({ + StackName: 'App-Stack', + TemplatePath: stringLike('*::assembly-App/*.template.json'), + }), + }), + ), + }), + }); +}); + +// tslint:disable: max-line-length +test('action has right settings for same-env deployment', () => { + // WHEN + pipeline.addApplicationStage(new OneStackApp(app, 'Same')); + + // THEN + expect(pipelineStack).toHaveResourceLike('AWS::CodePipeline::Pipeline', { + Stages: arrayWith({ + Name: 'Same', + Actions: [ + objectLike({ + Name: 'Stack.Prepare', + RoleArn: { 'Fn::Sub': 'arn:${AWS::Partition}:iam::${AWS::AccountId}:role/cdk-hnb659fds-deploy-role-${AWS::AccountId}-${AWS::Region}' }, + Configuration: objectLike({ + StackName: 'Same-Stack', + RoleArn: { 'Fn::Sub': 'arn:${AWS::Partition}:iam::${AWS::AccountId}:role/cdk-hnb659fds-cfn-exec-role-${AWS::AccountId}-${AWS::Region}' }, + }), + }), + objectLike({ + Name: 'Stack.Deploy', + RoleArn: { 'Fn::Sub': 'arn:${AWS::Partition}:iam::${AWS::AccountId}:role/cdk-hnb659fds-deploy-role-${AWS::AccountId}-${AWS::Region}' }, + Configuration: objectLike({ + StackName: 'Same-Stack', + }), + }), + ], + }), + }); +}); + +test('action has right settings for cross-account deployment', () => { + // WHEN + pipeline.addApplicationStage(new OneStackApp(app, 'CrossAccount', { env: { account: 'you' }})); + + // THEN + expect(pipelineStack).toHaveResourceLike('AWS::CodePipeline::Pipeline', { + Stages: arrayWith({ + Name: 'CrossAccount', + Actions: [ + objectLike({ + Name: 'Stack.Prepare', + RoleArn: { 'Fn::Sub': 'arn:${AWS::Partition}:iam::you:role/cdk-hnb659fds-deploy-role-you-${AWS::Region}' }, + Configuration: objectLike({ + StackName: 'CrossAccount-Stack', + RoleArn: { 'Fn::Sub': 'arn:${AWS::Partition}:iam::you:role/cdk-hnb659fds-cfn-exec-role-you-${AWS::Region}' }, + }), + 
}), + objectLike({ + Name: 'Stack.Deploy', + RoleArn: { 'Fn::Sub': 'arn:${AWS::Partition}:iam::you:role/cdk-hnb659fds-deploy-role-you-${AWS::Region}' }, + Configuration: objectLike({ + StackName: 'CrossAccount-Stack', + }), + }), + ], + }), + }); +}); + +test('action has right settings for cross-region deployment', () => { + // WHEN + pipeline.addApplicationStage(new OneStackApp(app, 'CrossRegion', { env: { region: 'elsewhere' }})); + + // THEN + expect(pipelineStack).toHaveResourceLike('AWS::CodePipeline::Pipeline', { + Stages: arrayWith({ + Name: 'CrossRegion', + Actions: [ + objectLike({ + Name: 'Stack.Prepare', + RoleArn: { 'Fn::Sub': 'arn:${AWS::Partition}:iam::${AWS::AccountId}:role/cdk-hnb659fds-deploy-role-${AWS::AccountId}-elsewhere' }, + Region: 'elsewhere', + Configuration: objectLike({ + StackName: 'CrossRegion-Stack', + RoleArn: { 'Fn::Sub': 'arn:${AWS::Partition}:iam::${AWS::AccountId}:role/cdk-hnb659fds-cfn-exec-role-${AWS::AccountId}-elsewhere' }, + }), + }), + objectLike({ + Name: 'Stack.Deploy', + RoleArn: { 'Fn::Sub': 'arn:${AWS::Partition}:iam::${AWS::AccountId}:role/cdk-hnb659fds-deploy-role-${AWS::AccountId}-elsewhere' }, + Region: 'elsewhere', + Configuration: objectLike({ + StackName: 'CrossRegion-Stack', + }), + }), + ], + }), + }); +}); + +test('action has right settings for cross-account/cross-region deployment', () => { + // WHEN + pipeline.addApplicationStage(new OneStackApp(app, 'CrossBoth', { env: { account: 'you', region: 'elsewhere' }})); + + // THEN + expect(pipelineStack).toHaveResourceLike('AWS::CodePipeline::Pipeline', { + Stages: arrayWith({ + Name: 'CrossBoth', + Actions: [ + objectLike({ + Name: 'Stack.Prepare', + RoleArn: { 'Fn::Sub': 'arn:${AWS::Partition}:iam::you:role/cdk-hnb659fds-deploy-role-you-elsewhere' }, + Region: 'elsewhere', + Configuration: objectLike({ + StackName: 'CrossBoth-Stack', + RoleArn: { 'Fn::Sub': 'arn:${AWS::Partition}:iam::you:role/cdk-hnb659fds-cfn-exec-role-you-elsewhere' }, + }), + }), + 
objectLike({ + Name: 'Stack.Deploy', + RoleArn: { 'Fn::Sub': 'arn:${AWS::Partition}:iam::you:role/cdk-hnb659fds-deploy-role-you-elsewhere' }, + Region: 'elsewhere', + Configuration: objectLike({ + StackName: 'CrossBoth-Stack', + }), + }), + ], + }), + }); +}); + +// tslint:enable: max-line-length + +test('pipeline has self-mutation stage', () => { + // THEN + expect(pipelineStack).toHaveResourceLike('AWS::CodePipeline::Pipeline', { + Stages: arrayWith({ + Name: 'UpdatePipeline', + Actions: [ + objectLike({ + Name: 'SelfMutate', + Configuration: objectLike({ + ProjectName: { Ref: anything() }, + }), + }), + ], + }), + }); + + expect(pipelineStack).toHaveResourceLike('AWS::CodeBuild::Project', { + Source: { + BuildSpec: encodedJson(deepObjectLike({ + phases: { + install: { + commands: 'npm install -g aws-cdk', + }, + build: { + commands: arrayWith('cdk -a . deploy PipelineStack --require-approval=never --verbose'), + }, + }, + })), + Type: 'CODEPIPELINE', + }, + }); +}); + +test('selfmutation stage correctly identifies nested assembly of pipeline stack', () => { + const pipelineStage = new Stage(app, 'PipelineStage'); + const nestedPipelineStack = new Stack(pipelineStage, 'PipelineStack', { env: PIPELINE_ENV }); + new TestGitHubNpmPipeline(nestedPipelineStack, 'Cdk'); + + // THEN + expect(stackTemplate(nestedPipelineStack)).toHaveResourceLike('AWS::CodeBuild::Project', { + Source: { + BuildSpec: encodedJson(deepObjectLike({ + phases: { + build: { + commands: arrayWith('cdk -a assembly-PipelineStage deploy PipelineStage-PipelineStack --require-approval=never --verbose'), + }, + }, + })), + }, + }); +}); + +test('overridden stack names are respected', () => { + // WHEN + pipeline.addApplicationStage(new OneStackAppWithCustomName(app, 'App1')); + pipeline.addApplicationStage(new OneStackAppWithCustomName(app, 'App2')); + + // THEN + expect(pipelineStack).toHaveResourceLike('AWS::CodePipeline::Pipeline', { + Stages: arrayWith( + { + Name: 'App1', + Actions: 
arrayWith(objectLike({ + Name: 'MyFancyStack.Prepare', + Configuration: objectLike({ + StackName: 'MyFancyStack', + }), + })), + }, + { + Name: 'App2', + Actions: arrayWith(objectLike({ + Name: 'MyFancyStack.Prepare', + Configuration: objectLike({ + StackName: 'MyFancyStack', + }), + })), + }, + ), + }); +}); + +test('can control fix/CLI version used in pipeline selfupdate', () => { + // WHEN + const stack2 = new Stack(app, 'Stack2', { env: PIPELINE_ENV }); + new TestGitHubNpmPipeline(stack2, 'Cdk2', { + pipelineName: 'vpipe', + cdkCliVersion: '1.2.3', + }); + + // THEN + expect(stack2).toHaveResourceLike('AWS::CodeBuild::Project', { + Name: 'vpipe-selfupdate', + Source: { + BuildSpec: encodedJson(deepObjectLike({ + phases: { + install: { + commands: 'npm install -g aws-cdk@1.2.3', + }, + }, + })), + }, + }); +}); + +class OneStackApp extends Stage { + constructor(scope: Construct, id: string, props?: StageProps) { + super(scope, id, props); + new BucketStack(this, 'Stack'); + } +} + +class OneStackAppWithCustomName extends Stage { + constructor(scope: Construct, id: string, props?: StageProps) { + super(scope, id, props); + new BucketStack(this, 'Stack', { + stackName: 'MyFancyStack', + }); + } +} diff --git a/packages/@aws-cdk/pipelines/test/stack-ordering.test.ts b/packages/@aws-cdk/pipelines/test/stack-ordering.test.ts new file mode 100644 index 0000000000000..e755572c78544 --- /dev/null +++ b/packages/@aws-cdk/pipelines/test/stack-ordering.test.ts @@ -0,0 +1,83 @@ +import { arrayWith, objectLike } from '@aws-cdk/assert'; +import '@aws-cdk/assert/jest'; +import { App, Construct, Stack, Stage, StageProps } from '@aws-cdk/core'; +import * as cdkp from '../lib'; +import { sortedByRunOrder } from './testmatchers'; +import { BucketStack, PIPELINE_ENV, TestApp, TestGitHubNpmPipeline } from './testutil'; + +let app: App; +let pipelineStack: Stack; +let pipeline: cdkp.CdkPipeline; + +beforeEach(() => { + app = new TestApp(); + pipelineStack = new Stack(app, 
'PipelineStack', { env: PIPELINE_ENV }); + pipeline = new TestGitHubNpmPipeline(pipelineStack, 'Cdk'); +}); + +test('interdependent stacks are in the right order', () => { + // WHEN + pipeline.addApplicationStage(new TwoStackApp(app, 'MyApp')); + + // THEN + expect(pipelineStack).toHaveResourceLike('AWS::CodePipeline::Pipeline', { + Stages: arrayWith({ + Name: 'MyApp', + Actions: sortedByRunOrder([ + objectLike({ Name: 'Stack1.Prepare' }), + objectLike({ Name: 'Stack1.Deploy' }), + objectLike({ Name: 'Stack2.Prepare' }), + objectLike({ Name: 'Stack2.Deploy' }), + ]), + }), + }); +}); + +test('multiple independent stacks go in parallel', () => { + // WHEN + pipeline.addApplicationStage(new ThreeStackApp(app, 'MyApp')); + + // THEN + expect(pipelineStack).toHaveResourceLike('AWS::CodePipeline::Pipeline', { + Stages: arrayWith({ + Name: 'MyApp', + Actions: sortedByRunOrder([ + // 1 and 2 in parallel + objectLike({ Name: 'Stack1.Prepare' }), + objectLike({ Name: 'Stack2.Prepare' }), + objectLike({ Name: 'Stack1.Deploy' }), + objectLike({ Name: 'Stack2.Deploy' }), + // Then 3 + objectLike({ Name: 'Stack3.Prepare' }), + objectLike({ Name: 'Stack3.Deploy' }), + ]), + }), + }); +}); + +class TwoStackApp extends Stage { + constructor(scope: Construct, id: string, props?: StageProps) { + super(scope, id, props); + + const stack2 = new BucketStack(this, 'Stack2'); + const stack1 = new BucketStack(this, 'Stack1'); + + stack2.addDependency(stack1); + } +} + +/** + * Three stacks where the last one depends on the earlier 2 + */ +class ThreeStackApp extends Stage { + constructor(scope: Construct, id: string, props?: StageProps) { + super(scope, id, props); + + const stack1 = new BucketStack(this, 'Stack1'); + const stack2 = new BucketStack(this, 'Stack2'); + const stack3 = new BucketStack(this, 'Stack3'); + + stack3.addDependency(stack1); + stack3.addDependency(stack2); + } +} \ No newline at end of file diff --git a/packages/@aws-cdk/pipelines/test/test-docker-asset/Dockerfile 
b/packages/@aws-cdk/pipelines/test/test-docker-asset/Dockerfile new file mode 100644 index 0000000000000..d67ab4b1cc12c --- /dev/null +++ b/packages/@aws-cdk/pipelines/test/test-docker-asset/Dockerfile @@ -0,0 +1,2 @@ +FROM scratch +RUN touch built.txt \ No newline at end of file diff --git a/packages/@aws-cdk/pipelines/test/test-file-asset-two.txt b/packages/@aws-cdk/pipelines/test/test-file-asset-two.txt new file mode 100644 index 0000000000000..8b1c7231bf2f4 --- /dev/null +++ b/packages/@aws-cdk/pipelines/test/test-file-asset-two.txt @@ -0,0 +1 @@ +Here's a second file asset. \ No newline at end of file diff --git a/packages/@aws-cdk/pipelines/test/test-file-asset.txt b/packages/@aws-cdk/pipelines/test/test-file-asset.txt new file mode 100644 index 0000000000000..95e9dcd2e3bf0 --- /dev/null +++ b/packages/@aws-cdk/pipelines/test/test-file-asset.txt @@ -0,0 +1 @@ +This is a file asset that's just here for kicks. \ No newline at end of file diff --git a/packages/@aws-cdk/pipelines/test/testmatchers.ts b/packages/@aws-cdk/pipelines/test/testmatchers.ts new file mode 100644 index 0000000000000..d2279fb4383b4 --- /dev/null +++ b/packages/@aws-cdk/pipelines/test/testmatchers.ts @@ -0,0 +1,121 @@ +import { exactValue, InspectionFailure, PropertyMatcher } from '@aws-cdk/assert'; + +/** + * Do a glob-like pattern match (which only supports *s) + */ +export function stringLike(pattern: string): PropertyMatcher { + // Replace * with .* in the string, escape the rest and brace with ^...$ + const regex = new RegExp(`^${pattern.split('*').map(escapeRegex).join('.*')}$`); + + return annotate({ $stringContaining: pattern }, (value: any, failure: InspectionFailure) => { + if (typeof value !== 'string') { + failure.failureReason = `Expected a string, but got '${typeof value}'`; + return false; + } + + if (!regex.test(value)) { + failure.failureReason = 'String did not match pattern'; + return false; + } + + return true; + }); +} + +/** + * Matches any value + */ +export function 
anything(): PropertyMatcher { + return annotate({ $anything: true }, () => true); +} + +/** + * Negate an inner matcher + */ +export function notMatching(matcher: any): PropertyMatcher { + return annotate({ $notMatching: matcher }, (value: any, failure: InspectionFailure) => { + const result = makeMatcher(matcher)(value, failure); + if (result) { + failure.failureReason = 'Should not have matched, but did'; + return false; + } + return true; + }); +} + +/** + * Sort an array (of Actions) by their RunOrder field before applying a matcher. + * + * Makes the matcher independent of the order in which the Actions get synthed + * to the template. Elements with the same RunOrder will be sorted by name. + */ +export function sortedByRunOrder(matcher: any): PropertyMatcher { + return annotate({ $sortedByRunOrder: matcher }, (value: any, failure: InspectionFailure) => { + if (!Array.isArray(value)) { + failure.failureReason = `Expected an Array, but got '${typeof value}'`; + return false; + } + + value = value.slice(); + + value.sort((a: any, b: any) => { + if (a.RunOrder !== b.RunOrder) { return a.RunOrder - b.RunOrder; } + return (a.Name as string).localeCompare(b.Name); + }); + + return makeMatcher(matcher)(value, failure); + }); +} + +/** + * Match on the innards of a JSON string, instead of the complete string + */ +export function encodedJson(matcher: any): PropertyMatcher { + return annotate({ $encodedJson: matcher }, (value: any, failure: InspectionFailure) => { + if (typeof value !== 'string') { + failure.failureReason = `Expected a string, but got '${typeof value}'`; + return false; + } + + let decoded; + try { + decoded = JSON.parse(value); + } catch (e) { + failure.failureReason = `String is not JSON: ${e}`; + return false; + } + + return makeMatcher(matcher)(decoded, failure); + }); +} + +function escapeRegex(s: string) { + return s.replace(/[.*+?^${}()|[\]\\]/g, '\\$&'); // $& means the whole matched string +} + +/** + * Whether a value is a callable + */ 
+function isCallable(x: any): x is ((...args: any[]) => any) { + return x && {}.toString.call(x) === '[object Function]'; +} + +/** + * Turn a matcher or literal into a matcher + * + * Unfortunately I forgot to make the match() function public, so I can only accept matcher functions, not literals. + * However I can transform a literal into a matcher by using `exactValue`. + */ +function makeMatcher(matcher: any): PropertyMatcher { + return isCallable(matcher) ? matcher : exactValue(matcher); +} + +/** + * This should also have been in the upstream library + * + * Annotate a matcher with toJSON + */ +function annotate(how: A, matcher: PropertyMatcher): PropertyMatcher { + (matcher as any).toJSON = () => how; + return matcher; +} \ No newline at end of file diff --git a/packages/@aws-cdk/pipelines/test/testutil.ts b/packages/@aws-cdk/pipelines/test/testutil.ts new file mode 100644 index 0000000000000..9c87e64c502b7 --- /dev/null +++ b/packages/@aws-cdk/pipelines/test/testutil.ts @@ -0,0 +1,106 @@ +import * as codepipeline from '@aws-cdk/aws-codepipeline'; +import * as codepipeline_actions from '@aws-cdk/aws-codepipeline-actions'; +import * as s3 from '@aws-cdk/aws-s3'; +import { App, AppProps, Construct, Environment, SecretValue, Stack, StackProps, Stage } from '@aws-cdk/core'; +import * as fs from 'fs'; +import * as path from 'path'; +import * as cdkp from '../lib'; +import { assemblyBuilderOf } from '../lib/private/construct-internals'; + +export const PIPELINE_ENV: Environment = { + account: '123pipeline', + region: 'us-pipeline', +}; + +export class TestApp extends App { + constructor(props?: Partial) { + super({ + context: { + '@aws-cdk/core:newStyleStackSynthesis': '1', + }, + stackTraces: false, + autoSynth: false, + runtimeInfo: false, + treeMetadata: false, + ...props, + }); + } + + public cleanup() { + rimraf(assemblyBuilderOf(this).outdir); + } +} + +export class TestGitHubNpmPipeline extends cdkp.CdkPipeline { + public readonly sourceArtifact: 
codepipeline.Artifact; + public readonly cloudAssemblyArtifact: codepipeline.Artifact; + + constructor(scope: Construct, id: string, props?: Partial & { readonly sourceArtifact?: codepipeline.Artifact } ) { + const sourceArtifact = props?.sourceArtifact ?? new codepipeline.Artifact(); + const cloudAssemblyArtifact = props?.cloudAssemblyArtifact ?? new codepipeline.Artifact(); + + super(scope, id, { + sourceAction: new codepipeline_actions.GitHubSourceAction({ + actionName: 'GitHub', + output: sourceArtifact, + oauthToken: SecretValue.plainText('$3kr1t'), + owner: 'test', + repo: 'test', + trigger: codepipeline_actions.GitHubTrigger.POLL, + }), + synthAction: cdkp.SimpleSynthAction.standardNpmSynth({ + sourceArtifact, + cloudAssemblyArtifact, + }), + cloudAssemblyArtifact, + ...props, + }); + + this.sourceArtifact = sourceArtifact; + this.cloudAssemblyArtifact = cloudAssemblyArtifact; + } +} + +/** + * A test stack + * + * It contains a single Bucket. Such robust. Much uptime. + */ +export class BucketStack extends Stack { + public readonly bucket: s3.IBucket; + + constructor(scope: Construct, id: string, props?: StackProps) { + super(scope, id, props); + this.bucket = new s3.Bucket(this, 'Bucket'); + } +} + +/** + * rm -rf reimplementation, don't want to depend on an NPM package for this + */ +export function rimraf(fsPath: string) { + try { + const isDir = fs.lstatSync(fsPath).isDirectory(); + + if (isDir) { + for (const file of fs.readdirSync(fsPath)) { + rimraf(path.join(fsPath, file)); + } + fs.rmdirSync(fsPath); + } else { + fs.unlinkSync(fsPath); + } + } catch (e) { + // We will survive ENOENT + if (e.code !== 'ENOENT') { throw e; } + } +} + +/** + * Because 'expect(stack)' doesn't work correctly for stacks in nested assemblies + */ +export function stackTemplate(stack: Stack) { + const stage = Stage.of(stack); + if (!stage) { throw new Error('stack not in a Stage'); } + return stage.synth().getStackArtifact(stack.artifactId); +} \ No newline at end of file 
diff --git a/packages/@aws-cdk/pipelines/test/validation.test.ts b/packages/@aws-cdk/pipelines/test/validation.test.ts new file mode 100644 index 0000000000000..e2c16e8e67bfc --- /dev/null +++ b/packages/@aws-cdk/pipelines/test/validation.test.ts @@ -0,0 +1,178 @@ +import { _objectContaining, arrayWith, deepObjectLike } from '@aws-cdk/assert'; +import '@aws-cdk/assert/jest'; +import * as codepipeline from '@aws-cdk/aws-codepipeline'; +import { CfnOutput, Construct, Stack, Stage, StageProps } from '@aws-cdk/core'; +import * as cdkp from '../lib'; +import { anything, encodedJson } from './testmatchers'; +import { BucketStack, PIPELINE_ENV, TestApp, TestGitHubNpmPipeline } from './testutil'; + +let app: TestApp; +let pipelineStack: Stack; +let pipeline: cdkp.CdkPipeline; +let sourceArtifact: codepipeline.Artifact; +let cloudAssemblyArtifact: codepipeline.Artifact; +let integTestArtifact: codepipeline.Artifact; + +beforeEach(() => { + app = new TestApp(); + pipelineStack = new Stack(app, 'PipelineStack', { env: PIPELINE_ENV }); + sourceArtifact = new codepipeline.Artifact(); + cloudAssemblyArtifact = new codepipeline.Artifact('CloudAsm'); + integTestArtifact = new codepipeline.Artifact('IntegTests'); + pipeline = new TestGitHubNpmPipeline(pipelineStack, 'Cdk', { + sourceArtifact, + cloudAssemblyArtifact, + synthAction: cdkp.SimpleSynthAction.standardNpmSynth({ + sourceArtifact, + cloudAssemblyArtifact, + additionalArtifacts: [{ directory: 'test', artifact: integTestArtifact }], + }), + }); +}); + +afterEach(() => { + app.cleanup(); +}); + +test('can use stack outputs as validation inputs', () => { + // GIVEN + const stage = new AppWithStackOutput(app, 'MyApp'); + + // WHEN + const pipeStage = pipeline.addApplicationStage(stage); + pipeStage.addActions(new cdkp.ShellScriptAction({ + actionName: 'TestOutput', + useOutputs: { + BUCKET_NAME: pipeline.stackOutput(stage.output), + }, + commands: ['echo $BUCKET_NAME'], + })); + + // THEN + 
expect(pipelineStack).toHaveResourceLike('AWS::CodePipeline::Pipeline', { + Stages: arrayWith({ + Name: 'MyApp', + Actions: arrayWith( + deepObjectLike({ + Name: 'Stack.Deploy', + OutputArtifacts: [{ Name: anything() }], + Configuration: { + OutputFileName: 'outputs.json', + }, + }), + deepObjectLike({ + ActionTypeId: { + Provider: 'CodeBuild', + }, + Configuration: { + ProjectName: anything(), + }, + InputArtifacts: [{ Name: anything() }], + Name: 'TestOutput', + }), + ), + }), + }); + + expect(pipelineStack).toHaveResourceLike('AWS::CodeBuild::Project', { + Source: { + BuildSpec: encodedJson(deepObjectLike({ + phases: { + build: { + commands: [ + 'set -eu', + 'export BUCKET_NAME="$(node -pe \'require(process.env.CODEBUILD_SRC_DIR + "/outputs.json")["BucketName"]\')"', + 'echo $BUCKET_NAME', + ], + }, + }, + })), + Type: 'CODEPIPELINE', + }, + }); +}); + +test('can use additional files from source', () => { + // WHEN + pipeline.addStage('Test').addActions(new cdkp.ShellScriptAction({ + actionName: 'UseSources', + additionalArtifacts: [sourceArtifact], + commands: ['true'], + })); + + // THEN + expect(pipelineStack).toHaveResourceLike('AWS::CodePipeline::Pipeline', { + Stages: arrayWith({ + Name: 'Test', + Actions: [ + deepObjectLike({ + Name: 'UseSources', + InputArtifacts: [ { Name: 'Artifact_Source_GitHub' } ], + }), + ], + }), + }); + expect(pipelineStack).toHaveResourceLike('AWS::CodeBuild::Project', { + Source: { + BuildSpec: encodedJson(deepObjectLike({ + phases: { + build: { + commands: [ + 'set -eu', + 'true', + ], + }, + }, + })), + }, + }); +}); + +test('can use additional files from build', () => { + // WHEN + pipeline.addStage('Test').addActions(new cdkp.ShellScriptAction({ + actionName: 'UseBuildArtifact', + additionalArtifacts: [integTestArtifact], + commands: ['true'], + })); + + // THEN + expect(pipelineStack).toHaveResourceLike('AWS::CodePipeline::Pipeline', { + Stages: arrayWith({ + Name: 'Test', + Actions: [ + deepObjectLike({ + Name: 
'UseBuildArtifact', + InputArtifacts: [ { Name: 'IntegTests' } ], + }), + ], + }), + }); + expect(pipelineStack).toHaveResourceLike('AWS::CodeBuild::Project', { + Source: { + BuildSpec: encodedJson(deepObjectLike({ + phases: { + build: { + commands: [ + 'set -eu', + 'true', + ], + }, + }, + })), + }, + }); +}); + +class AppWithStackOutput extends Stage { + public readonly output: CfnOutput; + + constructor(scope: Construct, id: string, props?: StageProps) { + super(scope, id, props); + const stack = new BucketStack(this, 'Stack'); + + this.output = new CfnOutput(stack, 'BucketName', { + value: stack.bucket.bucketName, + }); + } +} \ No newline at end of file diff --git a/packages/aws-cdk/bin/cdk.ts b/packages/aws-cdk/bin/cdk.ts index c40c914714187..79b90682dc5c4 100644 --- a/packages/aws-cdk/bin/cdk.ts +++ b/packages/aws-cdk/bin/cdk.ts @@ -13,7 +13,7 @@ import { execProgram } from '../lib/api/cxapp/exec'; import { CdkToolkit } from '../lib/cdk-toolkit'; import { RequireApproval } from '../lib/diff'; import { availableInitLanguages, cliInit, printAvailableTemplates } from '../lib/init'; -import { data, debug, error, setLogLevel } from '../lib/logging'; +import { data, debug, error, print, setLogLevel } from '../lib/logging'; import { PluginHost } from '../lib/plugin'; import { serializeStructure } from '../lib/serialize'; import { Configuration, Settings } from '../lib/settings'; @@ -227,9 +227,20 @@ async function initCommandLine() { }); case 'bootstrap': + // Use new bootstrapping if it's requested via environment variable, or if + // new style stack synthesis has been configured in `cdk.json`. 
+ let useNewBootstrapping = false; + if (process.env.CDK_NEW_BOOTSTRAP) { + print('CDK_NEW_BOOTSTRAP set, using new-style bootstrapping'); + useNewBootstrapping = true; + } else if (configuration.context.get(cxapi.NEW_STYLE_STACK_SYNTHESIS_CONTEXT)) { + print(`'${cxapi.NEW_STYLE_STACK_SYNTHESIS_CONTEXT}' context set, using new-style bootstrapping`); + useNewBootstrapping = true; + } + return await cli.bootstrap(args.ENVIRONMENTS, toolkitStackName, args.roleArn, - !!process.env.CDK_NEW_BOOTSTRAP, + useNewBootstrapping, argv.force, { bucketName: configuration.settings.get(['toolkitBucket', 'bucketName']), diff --git a/packages/aws-cdk/lib/init-templates/sample-app/python/%name.PythonModule%/%name.PythonModule%_stack.template.py b/packages/aws-cdk/lib/init-templates/sample-app/python/%name.PythonModule%/%name.PythonModule%_stack.template.py index afc07081c834a..910a4b838c933 100644 --- a/packages/aws-cdk/lib/init-templates/sample-app/python/%name.PythonModule%/%name.PythonModule%_stack.template.py +++ b/packages/aws-cdk/lib/init-templates/sample-app/python/%name.PythonModule%/%name.PythonModule%_stack.template.py @@ -6,6 +6,7 @@ core ) + class %name.PascalCased%Stack(core.Stack): def __init__(self, scope: core.Construct, id: str, **kwargs) -> None: diff --git a/packages/aws-cdk/package.json b/packages/aws-cdk/package.json index bca680bdc7e8b..5dd94f107f546 100644 --- a/packages/aws-cdk/package.json +++ b/packages/aws-cdk/package.json @@ -71,7 +71,7 @@ "@aws-cdk/cx-api": "0.0.0", "@aws-cdk/region-info": "0.0.0", "archiver": "^4.0.1", - "aws-sdk": "^2.710.0", + "aws-sdk": "^2.712.0", "camelcase": "^6.0.0", "cdk-assets": "0.0.0", "colors": "^1.4.0", diff --git a/packages/cdk-assets/package.json b/packages/cdk-assets/package.json index 01283e8eb5778..e95d6ee415fc2 100644 --- a/packages/cdk-assets/package.json +++ b/packages/cdk-assets/package.json @@ -47,7 +47,7 @@ "@aws-cdk/cloud-assembly-schema": "0.0.0", "@aws-cdk/cx-api": "0.0.0", "archiver": "^4.0.1", - "aws-sdk": 
"^2.710.0", + "aws-sdk": "^2.712.0", "glob": "^7.1.6", "yargs": "^15.3.1" }, diff --git a/packages/decdk/package.json b/packages/decdk/package.json index 59b0ad777f07e..6e698e76cb08f 100644 --- a/packages/decdk/package.json +++ b/packages/decdk/package.json @@ -28,6 +28,7 @@ "license": "Apache-2.0", "dependencies": { "@aws-cdk/alexa-ask": "0.0.0", + "@aws-cdk/pipelines": "0.0.0", "@aws-cdk/app-delivery": "0.0.0", "@aws-cdk/assets": "0.0.0", "@aws-cdk/aws-accessanalyzer": "0.0.0", diff --git a/packages/monocdk-experiment/package.json b/packages/monocdk-experiment/package.json index 71a5dd88067b3..dde22f6a52f6e 100644 --- a/packages/monocdk-experiment/package.json +++ b/packages/monocdk-experiment/package.json @@ -247,6 +247,7 @@ "@aws-cdk/core": "0.0.0", "@aws-cdk/custom-resources": "0.0.0", "@aws-cdk/cx-api": "0.0.0", + "@aws-cdk/pipelines": "0.0.0", "@aws-cdk/region-info": "0.0.0", "@types/fs-extra": "^8.1.1", "@types/node": "^10.17.26", diff --git a/yarn.lock b/yarn.lock index 7ec153df3ffd6..9427b02b2d20f 100644 --- a/yarn.lock +++ b/yarn.lock @@ -2160,10 +2160,10 @@ aws-sdk-mock@^5.1.0: sinon "^9.0.1" traverse "^0.6.6" -aws-sdk@^2.637.0, aws-sdk@^2.710.0: - version "2.710.0" - resolved "https://registry.yarnpkg.com/aws-sdk/-/aws-sdk-2.710.0.tgz#82c086587679382f80dfad743da7f0582fcd381b" - integrity sha512-GQTPH0DzJMpvvtZ3VO+grkKVdL/nqjWsIfcVf1c3oedvEjW24wSXQEs6KWAGbpG2jbHsYKH7kZ4XXuq428LVAw== +aws-sdk@^2.637.0, aws-sdk@^2.712.0: + version "2.712.0" + resolved "https://registry.yarnpkg.com/aws-sdk/-/aws-sdk-2.712.0.tgz#828d6ef7556f4b5098c880469ff72195a83f9d66" + integrity sha512-C3SLWanFydoWJwtKNi73BG9uB6UzrUuECaAiplOEVBltO/R4sBsHWhwTBuxS02eTNdRrgulu19bJ5RWt+OuXiA== dependencies: buffer "4.9.2" events "1.1.1" @@ -4173,10 +4173,10 @@ extsprintf@^1.2.0: resolved "https://registry.yarnpkg.com/extsprintf/-/extsprintf-1.4.0.tgz#e2689f8f356fad62cca65a3a91c5df5f9551692f" integrity sha1-4mifjzVvrWLMplo6kcXfX5VRaS8= -fast-check@^1.25.1: - version "1.25.1" - resolved 
"https://registry.yarnpkg.com/fast-check/-/fast-check-1.25.1.tgz#aab6e34496a23ba7d7d20188699d9abdcaaa2dcc" - integrity sha512-4lyIDY2YKpSiPXpceCQBTfDxLh/7/C3OHgvzToea3y1YAlv38Wz9mfIsu+MD4go0NX3ow/g98kEmlW00+CoH3w== +fast-check@^1.26.0: + version "1.26.0" + resolved "https://registry.yarnpkg.com/fast-check/-/fast-check-1.26.0.tgz#3a85998a9c30ed7f58976276e06046645e0de18a" + integrity sha512-B1AjSfe0bmi6FdFIzmrrGSjrsF6e2MCmZiM6zJaRbBMP+gIvdNakle5FIMKi0xbS9KlN9BZho1R7oB/qoNIQuA== dependencies: pure-rand "^2.0.0" tslib "^2.0.0" @@ -6474,10 +6474,10 @@ lodash.uniq@^4.5.0: resolved "https://registry.yarnpkg.com/lodash.uniq/-/lodash.uniq-4.5.0.tgz#d0225373aeb652adc1bc82e4945339a842754773" integrity sha1-0CJTc662Uq3BvILklFM5qEJ1R3M= -lodash@^4.17.12, lodash@^4.17.13, lodash@^4.17.14, lodash@^4.17.15, lodash@^4.2.1: - version "4.17.15" - resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.15.tgz#b447f6670a0455bbfeedd11392eff330ea097548" - integrity sha512-8xOcRHvCjnocdS5cpwXQXVzmmh5e5+saE2QGoeQmbKmRS6J3VQppPOIt0MnmE+4xlZoumy0GPG0D0MVIQbNA1A== +lodash@^4.17.12, lodash@^4.17.13, lodash@^4.17.14, lodash@^4.17.15, lodash@^4.17.19, lodash@^4.2.1: + version "4.17.19" + resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.19.tgz#e48ddedbe30b3321783c5b4301fbd353bc1e4a4b" + integrity sha512-JNvd8XER9GQX0v2qJgsaN/mzFCNA5BRe/j8JN9d+tWyGLSodKQHKFicdwNYzWwI3wjRnaKPsGj1XkBjx/F96DQ== log-driver@^1.2.7: version "1.2.7"