feat: Add incremental export support for aws_dynamodb_table_export #41303

Merged
3 changes: 3 additions & 0 deletions .changelog/41303.txt
@@ -0,0 +1,3 @@
```release-note:enhancement
resource/aws_dynamodb_table_export: Add `export_type` and `incremental_export_specification` arguments
```
96 changes: 96 additions & 0 deletions internal/service/dynamodb/table_export.go
@@ -71,6 +71,44 @@ func resourceTableExport() *schema.Resource {
				ForceNew:     true,
				ValidateFunc: verify.ValidUTCTimestamp,
			},
			"export_type": {
				Type:             schema.TypeString,
				Optional:         true,
				Computed:         true,
				ForceNew:         true,
				ValidateDiagFunc: enum.Validate[awstypes.ExportType](),
			},
			"incremental_export_specification": {
				Type:     schema.TypeList,
				Optional: true,
				ForceNew: true,
				MaxItems: 1,
				Elem: &schema.Resource{
					Schema: map[string]*schema.Schema{
						"export_from_time": {
							Type:         schema.TypeString,
							Optional:     true,
							Computed:     true,
							ForceNew:     true,
							ValidateFunc: verify.ValidUTCTimestamp,
						},
						"export_to_time": {
							Type:         schema.TypeString,
							Optional:     true,
							Computed:     true,
							ForceNew:     true,
							ValidateFunc: verify.ValidUTCTimestamp,
						},
						"export_view_type": {
							Type:             schema.TypeString,
							Optional:         true,
							Computed:         true,
							ForceNew:         true,
							ValidateDiagFunc: enum.Validate[awstypes.ExportViewType](),
						},
					},
				},
			},
			"item_count": {
				Type:     schema.TypeInt,
				Computed: true,
@@ -145,6 +183,14 @@ func resourceTableExportCreate(ctx context.Context, d *schema.ResourceData, meta
		input.ExportTime = aws.Time(v)
	}

	if v, ok := d.GetOk("export_type"); ok {
		input.ExportType = awstypes.ExportType(v.(string))
	}

	if v, ok := d.GetOk("incremental_export_specification"); ok {
		input.IncrementalExportSpecification = expandIncrementalExportSpecification(v.([]interface{}))
	}

	if v, ok := d.GetOk("s3_bucket_owner"); ok {
		input.S3BucketOwner = aws.String(v.(string))
	}
@@ -202,6 +248,8 @@ func resourceTableExportRead(ctx context.Context, d *schema.ResourceData, meta i
	if desc.ExportTime != nil {
		d.Set("export_time", aws.ToTime(desc.ExportTime).Format(time.RFC3339))
	}
	d.Set("export_type", desc.ExportType)
	d.Set("incremental_export_specification", flattenIncrementalExportSpecification(desc.IncrementalExportSpecification))
	d.Set("item_count", desc.ItemCount)
	d.Set("manifest_files_s3_key", desc.ExportManifest)
	d.Set(names.AttrS3Bucket, desc.S3Bucket)
@@ -217,6 +265,54 @@ func resourceTableExportRead(ctx context.Context, d *schema.ResourceData, meta i
	return diags
}

func expandIncrementalExportSpecification(d interface{}) *awstypes.IncrementalExportSpecification {
	tfList, ok := d.([]interface{})
	if !ok || len(tfList) == 0 {
		return nil
	}

	dMap := tfList[0].(map[string]interface{})

	spec := &awstypes.IncrementalExportSpecification{}

	if s, ok := dMap["export_from_time"].(string); ok && s != "" {
		// The schema validates this attribute with verify.ValidUTCTimestamp,
		// so the parse error can safely be discarded here.
		v, _ := time.Parse(time.RFC3339, s)
		spec.ExportFromTime = aws.Time(v)
	}

	if s, ok := dMap["export_to_time"].(string); ok && s != "" {
		v, _ := time.Parse(time.RFC3339, s)
		spec.ExportToTime = aws.Time(v)
	}

	if v, ok := dMap["export_view_type"].(string); ok && v != "" {
		spec.ExportViewType = awstypes.ExportViewType(v)
	}

	return spec
}

func flattenIncrementalExportSpecification(apiObject *awstypes.IncrementalExportSpecification) []interface{} {
	if apiObject == nil {
		return []interface{}{}
	}

	m := map[string]interface{}{}

	if v := apiObject.ExportFromTime; v != nil {
		m["export_from_time"] = aws.ToTime(v).Format(time.RFC3339)
	}

	if v := apiObject.ExportToTime; v != nil {
		m["export_to_time"] = aws.ToTime(v).Format(time.RFC3339)
	}

	if v := string(apiObject.ExportViewType); v != "" {
		m["export_view_type"] = v
	}

	return []interface{}{m}
}

func findTableExportByARN(ctx context.Context, conn *dynamodb.Client, arn string) (*awstypes.ExportDescription, error) {
	input := &dynamodb.DescribeExportInput{
		ExportArn: aws.String(arn),
124 changes: 118 additions & 6 deletions internal/service/dynamodb/table_export_test.go
@@ -7,6 +7,7 @@ import (
	"context"
	"fmt"
	"testing"
	"time"

	"github.com/YakDriver/regexache"
	"github.com/aws/aws-sdk-go-v2/service/dynamodb"
@@ -26,7 +27,7 @@ func TestAccDynamoDBTableExport_basic(t *testing.T) {
		t.Skip("skipping long-running test in short mode")
	}

	var tableExport awstypes.ExportDescription
	rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix)
	resourceName := "aws_dynamodb_table_export.test"
	s3BucketResourceName := "aws_s3_bucket.test"
@@ -40,9 +41,11 @@ func TestAccDynamoDBTableExport_basic(t *testing.T) {
			{
				Config: testAccTableExportConfig_basic(rName),
				Check: resource.ComposeAggregateTestCheckFunc(
					testAccCheckTableExportExists(ctx, resourceName, &tableExport),
					resource.TestCheckResourceAttr(resourceName, "export_format", "DYNAMODB_JSON"),
					resource.TestCheckResourceAttr(resourceName, "export_status", "COMPLETED"),
					resource.TestCheckResourceAttr(resourceName, "export_type", ""),
					resource.TestCheckResourceAttr(resourceName, "incremental_export_specification.#", "0"),
					resource.TestCheckResourceAttr(resourceName, "item_count", "0"),
					resource.TestCheckResourceAttrPair(resourceName, names.AttrS3Bucket, s3BucketResourceName, names.AttrID),
					resource.TestCheckResourceAttr(resourceName, "s3_bucket_owner", ""),
@@ -74,7 +77,7 @@ func TestAccDynamoDBTableExport_kms(t *testing.T) {
		t.Skip("skipping long-running test in short mode")
	}

	var tableExport awstypes.ExportDescription
	rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix)
	resourceName := "aws_dynamodb_table_export.test"
	s3BucketResourceName := "aws_s3_bucket.test"
@@ -93,9 +96,11 @@ func TestAccDynamoDBTableExport_kms(t *testing.T) {
			{
				Config: testAccTableExportConfig_kms(rName),
				Check: resource.ComposeAggregateTestCheckFunc(
					testAccCheckTableExportExists(ctx, resourceName, &tableExport),
					resource.TestCheckResourceAttr(resourceName, "export_format", "DYNAMODB_JSON"),
					resource.TestCheckResourceAttr(resourceName, "export_status", "COMPLETED"),
					resource.TestCheckResourceAttr(resourceName, "export_type", "FULL_EXPORT"),
					resource.TestCheckResourceAttr(resourceName, "incremental_export_specification.#", "0"),
					resource.TestCheckResourceAttr(resourceName, "item_count", "0"),
					resource.TestCheckResourceAttrPair(resourceName, names.AttrS3Bucket, s3BucketResourceName, names.AttrID),
					resource.TestCheckResourceAttr(resourceName, "s3_bucket_owner", ""),
@@ -127,7 +132,7 @@ func TestAccDynamoDBTableExport_s3Prefix(t *testing.T) {
		t.Skip("skipping long-running test in short mode")
	}

	var tableExport awstypes.ExportDescription
	rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix)
	resourceName := "aws_dynamodb_table_export.test"
	s3BucketResourceName := "aws_s3_bucket.test"
@@ -145,10 +150,12 @@ func TestAccDynamoDBTableExport_s3Prefix(t *testing.T) {
			{
				Config: testAccTableExportConfig_s3Prefix(rName, "test"),
				Check: resource.ComposeAggregateTestCheckFunc(
					testAccCheckTableExportExists(ctx, resourceName, &tableExport),
					resource.TestCheckResourceAttr(resourceName, "export_format", "DYNAMODB_JSON"),
					resource.TestCheckResourceAttr(resourceName, "export_status", "COMPLETED"),
					resource.TestCheckResourceAttr(resourceName, "export_type", ""),
					resource.TestCheckResourceAttr(resourceName, "item_count", "0"),
					resource.TestCheckResourceAttr(resourceName, "incremental_export_specification.#", "0"),
					resource.TestCheckResourceAttrPair(resourceName, names.AttrS3Bucket, s3BucketResourceName, names.AttrID),
					resource.TestCheckResourceAttr(resourceName, "s3_bucket_owner", ""),
					resource.TestCheckResourceAttr(resourceName, "s3_prefix", "test"),
@@ -173,6 +180,83 @@ func TestAccDynamoDBTableExport_s3Prefix(t *testing.T) {
	})
}

func TestAccDynamoDBTableExport_incrementalExport(t *testing.T) {
	ctx := acctest.Context(t)
	if testing.Short() {
		t.Skip("skipping long-running test in short mode")
	}

	var tableExport awstypes.ExportDescription
	rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix)
	resourceName := "aws_dynamodb_table_export.test"
	timeResourceName := "time_static.table_create"

	resource.ParallelTest(t, resource.TestCase{
		PreCheck:                 func() { acctest.PreCheck(ctx, t) },
		ErrorCheck:               acctest.ErrorCheck(t, names.DynamoDBServiceID),
		ProtoV5ProviderFactories: acctest.ProtoV5FactoriesAlternate(ctx, t),
		ExternalProviders: map[string]resource.ExternalProvider{
			"time": {
				Source:            "hashicorp/time",
				VersionConstraint: "0.12.1",
			},
		},
		CheckDestroy: acctest.CheckDestroyNoop,
		Steps: []resource.TestStep{
			{
				Config: testAccTableExportConfig_incrementalExport(rName, "time_static.table_create.rfc3339", "null", "null"),
				Check: resource.ComposeAggregateTestCheckFunc(
					testAccCheckTableExportExists(ctx, resourceName, &tableExport),
					resource.TestCheckResourceAttr(resourceName, "export_status", "COMPLETED"),
					resource.TestCheckResourceAttr(resourceName, "export_type", "INCREMENTAL_EXPORT"),
					resource.TestCheckResourceAttr(resourceName, "incremental_export_specification.#", "1"),
					resource.TestCheckResourceAttrPair(resourceName, "incremental_export_specification.0.export_from_time", timeResourceName, "rfc3339"),
					resource.TestCheckResourceAttrSet(resourceName, "incremental_export_specification.0.export_to_time"),
					resource.TestCheckResourceAttr(resourceName, "incremental_export_specification.0.export_view_type", "NEW_AND_OLD_IMAGES"),
					resource.TestCheckResourceAttrSet(resourceName, names.AttrStartTime),
					resource.TestCheckResourceAttrSet(resourceName, "end_time"),
					acctest.CheckResourceAttrRegionalARN(ctx, resourceName, "table_arn", "dynamodb", fmt.Sprintf("table/%s", rName)),
				),
			},
			{
				ResourceName:      resourceName,
				ImportState:       true,
				ImportStateVerify: true,
			},
			{
				Config: testAccTableExportConfig_incrementalExport(rName, "time_static.table_create.rfc3339", "timeadd(time_static.table_create.rfc3339, \"15m\")", "\"NEW_IMAGE\""),
				Check: resource.ComposeAggregateTestCheckFunc(
					testAccCheckTableExportExists(ctx, resourceName, &tableExport),
					resource.TestCheckResourceAttr(resourceName, "export_status", "COMPLETED"),
					resource.TestCheckResourceAttr(resourceName, "export_type", "INCREMENTAL_EXPORT"),
					resource.TestCheckResourceAttr(resourceName, "incremental_export_specification.#", "1"),
					resource.TestCheckResourceAttrPair(resourceName, "incremental_export_specification.0.export_from_time", timeResourceName, "rfc3339"),
					resource.TestCheckResourceAttrWith(resourceName, "incremental_export_specification.0.export_to_time", func(value string) error {
						exportFromTime := tableExport.IncrementalExportSpecification.ExportFromTime
						if exportFromTime == nil {
							return fmt.Errorf("expected export_from_time to be set")
						}
						expectedValue := exportFromTime.Add(15 * time.Minute).Format(time.RFC3339)
						if value != expectedValue {
							return fmt.Errorf("value expected to be %s, got %s", expectedValue, value)
						}
						return nil
					}),
					resource.TestCheckResourceAttr(resourceName, "incremental_export_specification.0.export_view_type", "NEW_IMAGE"),
					resource.TestCheckResourceAttrSet(resourceName, names.AttrStartTime),
					resource.TestCheckResourceAttrSet(resourceName, "end_time"),
					acctest.CheckResourceAttrRegionalARN(ctx, resourceName, "table_arn", "dynamodb", fmt.Sprintf("table/%s", rName)),
				),
			},
			{
				ResourceName:      resourceName,
				ImportState:       true,
				ImportStateVerify: true,
			},
		},
	})
}

func testAccCheckTableExportExists(ctx context.Context, n string, v *awstypes.ExportDescription) resource.TestCheckFunc {
	return func(s *terraform.State) error {
		rs, ok := s.RootModule().Resources[n]
@@ -250,6 +334,7 @@ resource "aws_kms_key" "test" {
}

resource "aws_dynamodb_table_export" "test" {
  export_type       = "FULL_EXPORT"
  s3_bucket         = aws_s3_bucket.test.id
  s3_sse_kms_key_id = aws_kms_key.test.id
  s3_sse_algorithm  = "KMS"
@@ -266,3 +351,30 @@ resource "aws_dynamodb_table_export" "test" {
  table_arn = aws_dynamodb_table.test.arn
}`, s3BucketPrefix))
}

func testAccTableExportConfig_incrementalExport(tableName, exportFromTime, exportToTime, exportViewType string) string {
	return acctest.ConfigCompose(testAccTableExportConfig_baseConfig(tableName), fmt.Sprintf(`
resource "time_static" "table_create" {
  depends_on = [aws_dynamodb_table.test]
}

resource "time_sleep" "wait_pitr_min" {
  create_duration = "16m"
  depends_on      = [time_static.table_create]
}

resource "aws_dynamodb_table_export" "test" {
  export_type = "INCREMENTAL_EXPORT"
  s3_bucket   = aws_s3_bucket.test.id
  table_arn   = aws_dynamodb_table.test.arn

  incremental_export_specification {
    export_from_time = %[1]s
    export_to_time   = %[2]s
    export_view_type = %[3]s
  }

  depends_on = [time_sleep.wait_pitr_min]
}
`, exportFromTime, exportToTime, exportViewType))
}
27 changes: 26 additions & 1 deletion website/docs/r/dynamodb_table_export.html.markdown
@@ -57,6 +57,21 @@ resource "aws_dynamodb_table_export" "example" {
}
```

### Incremental export

```terraform
resource "aws_dynamodb_table_export" "example" {
export_type = "INCREMENTAL_EXPORT"
s3_bucket = aws_s3_bucket.example.id
table_arn = aws_dynamodb_table.example.arn

incremental_export_specification {
export_from_time = "2025-02-09T12:00:00+01:00"
export_to_time = "2025-02-09T13:00:00+01:00"
}
}
```

## Argument Reference

The following arguments are required:
@@ -66,13 +81,23 @@ The following arguments are required:

The following arguments are optional:

* `export_format` - (Optional, Forces new resource) Format for the exported data. Valid values are: `DYNAMODB_JSON`, `ION`. See the [AWS Documentation](https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/S3DataExport.Output.html#S3DataExport.Output_Data) for more information on these export formats. Default is `DYNAMODB_JSON`.
* `export_time` - (Optional, Forces new resource) Time in RFC3339 format from which to export table data. The table export will be a snapshot of the table's state at this point in time. Omitting this value will result in a snapshot from the current time.
* `export_type` - (Optional, Forces new resource) Whether to execute as a full export or incremental export. Valid values are: `FULL_EXPORT`, `INCREMENTAL_EXPORT`. Defaults to `FULL_EXPORT`. If `INCREMENTAL_EXPORT` is provided, the `incremental_export_specification` argument must also be provided (a minimal example follows this list).
* `incremental_export_specification` - (Optional, Forces new resource) Parameters specific to an incremental export. See [`incremental_export_specification` Block](#incremental_export_specification-block) for details.
* `s3_bucket_owner` - (Optional, Forces new resource) ID of the AWS account that owns the bucket the export will be stored in.
* `s3_prefix` - (Optional, Forces new resource) Amazon S3 bucket prefix to use as the file name and path of the exported snapshot.
* `s3_sse_algorithm` - (Optional, Forces new resource) Type of encryption used on the bucket where export data will be stored. Valid values are: `AES256`, `KMS`.
* `s3_sse_kms_key_id` - (Optional, Forces new resource) ID of the AWS KMS managed key used to encrypt the S3 bucket where export data will be stored (if applicable).
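
As a quick illustration of how `export_type` combines with the other top-level arguments, the following is a minimal sketch of a full export pinned to a past point in time (the `example` bucket and table resources are assumed to be defined elsewhere in the configuration):

```terraform
resource "aws_dynamodb_table_export" "example" {
  export_type = "FULL_EXPORT"
  export_time = "2025-02-09T12:00:00+01:00"
  s3_bucket   = aws_s3_bucket.example.id
  table_arn   = aws_dynamodb_table.example.arn
}
```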

### `incremental_export_specification` Block

The `incremental_export_specification` configuration block supports the following arguments:

* `export_from_time` - (Optional, Forces new resource) Time in RFC3339 format which provides the inclusive start range for the export table's data. The incremental export will reflect the table's state including and after this point in time.
* `export_to_time` - (Optional, Forces new resource) Time in RFC3339 format which provides the exclusive end range for the export table's data. The incremental export will reflect the table's state just prior to this point in time. If this is not provided, the latest time with data available will be used.
* `export_view_type` - (Optional, Forces new resource) View type to use for the export. Valid values are: `NEW_AND_OLD_IMAGES`, `NEW_IMAGE`. Defaults to `NEW_AND_OLD_IMAGES`. See the example below.
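
For example, the following is a minimal sketch of an incremental export that captures only new item images (again, the `example` bucket and table resources are assumed to be defined elsewhere):

```terraform
resource "aws_dynamodb_table_export" "example" {
  export_type = "INCREMENTAL_EXPORT"
  s3_bucket   = aws_s3_bucket.example.id
  table_arn   = aws_dynamodb_table.example.arn

  incremental_export_specification {
    export_from_time = "2025-02-09T12:00:00+01:00"
    export_to_time   = "2025-02-09T13:00:00+01:00"
    export_view_type = "NEW_IMAGE"
  }
}
```

Note that the export window must cover at least 15 minutes of table history; the acceptance test in this PR waits 16 minutes after table creation before requesting an incremental export for this reason.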

## Attribute Reference

This resource exports the following attributes in addition to the arguments above: