Skip to content

Commit

Permalink
Enforce consistent naming for resource files (databricks#1366)
Browse files Browse the repository at this point in the history
* Fixed README.md
* Enforced consistent naming for resources and files
* Added provider/completeness.md to track documentation and testing coverage
  • Loading branch information
nfx authored Jun 16, 2022
1 parent 9f64d84 commit 1cbdf62
Show file tree
Hide file tree
Showing 78 changed files with 971 additions and 569 deletions.
4 changes: 3 additions & 1 deletion .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -340,4 +340,6 @@ tf.log

scripts/tt

.metals
.metals

provider/completeness.md
12 changes: 12 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,9 @@
| [databricks_dbfs_file](docs/resources/dbfs_file.md)
| [databricks_dbfs_file_paths](docs/data-sources/dbfs_file_paths.md) data
| [databricks_dbfs_file](docs/data-sources/dbfs_file.md) data
| [databricks_directory](docs/resources/directory.md)
| [databricks_external_location](docs/resources/external_location.md)
| [databricks_git_credential](docs/resources/git_credential.md)
| [databricks_global_init_script](docs/resources/global_init_script.md)
| [databricks_grants](docs/resources/grants.md)
| [databricks_group](docs/resources/group.md)
Expand All @@ -31,17 +33,22 @@
| [databricks_instance_profile](docs/resources/instance_profile.md)
| [databricks_ip_access_list](docs/resources/ip_access_list.md)
| [databricks_job](docs/resources/job.md)
| [databricks_jobs](docs/data-sources/jobs.md)
| [databricks_library](docs/resources/library.md)
| [databricks_metastore](docs/resources/metastore.md)
| [databricks_metastore_assignment](docs/resources/metastore_assignment.md)
| [databricks_metastore_data_access](docs/resources/metastore_data_access.md)
| [databricks_mlflow_model](docs/resources/mlflow_model.md)
| [databricks_mlflow_experiment](docs/resources/mlflow_experiment.md)
| [databricks_mlflow_webhook](docs/resources/mlflow_webhook.md)
| [databricks_mount](docs/resources/mount.md)
| [databricks_mws_credentials](docs/resources/mws_credentials.md)
| [databricks_mws_customer_managed_keys](docs/resources/mws_customer_managed_keys.md)
| [databricks_mws_log_delivery](docs/resources/mws_log_delivery.md)
| [databricks_mws_networks](docs/resources/mws_networks.md)
| [databricks_mws_private_access_settings](docs/resources/mws_private_access_settings.md)
| [databricks_mws_storage_configurations](docs/resources/mws_storage_configurations.md)
| [databricks_mws_vpc_endpoint](docs/resources/mws_vpc_endpoint.md)
| [databricks_mws_workspaces](docs/resources/mws_workspaces.md)
| [databricks_node_type](docs/data-sources/node_type.md) data
| [databricks_notebook](docs/resources/notebook.md)
Expand All @@ -56,6 +63,8 @@
| [databricks_secret](docs/resources/secret.md)
| [databricks_secret_acl](docs/resources/secret_acl.md)
| [databricks_secret_scope](docs/resources/secret_scope.md)
| [databricks_service_principal](docs/resources/service_principal.md)
| [databricks_service_principal_role](docs/resources/service_principal_role.md)
| [databricks_spark_version](docs/data-sources/spark_version.md) data
| [databricks_sql_dashboard](docs/resources/sql_dashboard.md)
| [databricks_sql_endpoint](docs/resources/sql_endpoint.md)
Expand All @@ -69,8 +78,11 @@
| [databricks_tables](docs/data-sources/table.md) data
| [databricks_token](docs/resources/token.md)
| [databricks_user](docs/resources/user.md)
| [databricks_user_role](docs/resources/user_role.md)
| [databricks_user_instance_profile](docs/resources/user_instance_profile.md)
| [databricks_views](docs/data-sources/views.md) data
| [databricks_workspace_conf](docs/resources/workspace_conf.md)
| [databricks_zones](docs/data-sources/zones.md)
| [Contributing and Development Guidelines](CONTRIBUTING.md)

[![build](https://github.com/databrickslabs/terraform-provider-databricks/workflows/build/badge.svg?branch=master)](https://github.com/databrickslabs/terraform-provider-databricks/actions?query=workflow%3Abuild+branch%3Amaster) [![codecov](https://codecov.io/gh/databrickslabs/terraform-provider-databricks/branch/master/graph/badge.svg)](https://codecov.io/gh/databrickslabs/terraform-provider-databricks) ![lines](https://img.shields.io/tokei/lines/github/databrickslabs/terraform-provider-databricks) [![downloads](https://img.shields.io/github/downloads/databrickslabs/terraform-provider-databricks/total.svg)](https://hanadigital.github.io/grev/?user=databrickslabs&repo=terraform-provider-databricks)
Expand Down
File renamed without changes.
File renamed without changes.
91 changes: 91 additions & 0 deletions aws/data_aws_assume_role_policy.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,91 @@
package aws

import (
"context"
"encoding/json"
"fmt"

"github.com/hashicorp/terraform-plugin-sdk/v2/diag"
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
)

// awsIamPolicy represents an AWS IAM policy document. It is marshalled to
// JSON, which is why the tags use the capitalized field names mandated by
// the IAM policy grammar ("Version", "Id", "Statement").
type awsIamPolicy struct {
	Version    string                   `json:"Version,omitempty"`
	ID         string                   `json:"Id,omitempty"`
	Statements []*awsIamPolicyStatement `json:"Statement"`
}

// awsIamPolicyStatement represents a single statement of an AWS IAM policy
// document. Actions/NotActions and Resources/NotResources are interface{}
// because the IAM grammar allows either a single string or a list of strings
// in those positions.
type awsIamPolicyStatement struct {
	Sid          string                            `json:"Sid,omitempty"`
	Effect       string                            `json:"Effect,omitempty"`
	Actions      interface{}                       `json:"Action,omitempty"`
	NotActions   interface{}                       `json:"NotAction,omitempty"`
	Resources    interface{}                       `json:"Resource,omitempty"`
	NotResources interface{}                       `json:"NotResource,omitempty"`
	Principal    map[string]string                 `json:"Principal,omitempty"`
	Condition    map[string]map[string]string      `json:"Condition,omitempty"`
}


// DataAwsAssumeRolePolicy defines a data source that renders the JSON of an
// AWS IAM cross-account assume-role policy, allowing the Databricks account
// (or, for log delivery, its usage-delivery role) to assume a customer role.
// The rendered document is exposed through the computed "json" attribute.
func DataAwsAssumeRolePolicy() *schema.Resource {
	return &schema.Resource{
		ReadContext: func(ctx context.Context, d *schema.ResourceData, m interface{}) diag.Diagnostics {
			externalID := d.Get("external_id").(string)
			policy := awsIamPolicy{
				Version: "2012-10-17",
				Statements: []*awsIamPolicyStatement{
					{
						Effect:  "Allow",
						Actions: "sts:AssumeRole",
						Condition: map[string]map[string]string{
							// the external ID condition guards against the
							// confused-deputy problem
							"StringEquals": {
								"sts:ExternalId": externalID,
							},
						},
						Principal: map[string]string{
							"AWS": fmt.Sprintf("arn:aws:iam::%s:root", d.Get("databricks_account_id").(string)),
						},
					},
				},
			}
			if v, ok := d.GetOk("for_log_delivery"); ok && v.(bool) {
				// this is production UsageDelivery IAM role, that is considered a constant
				logDeliveryARN := "arn:aws:iam::414351767826:role/SaasUsageDeliveryRole-prod-IAMRole-3PLHICCRR1TK"
				policy.Statements[0].Principal["AWS"] = logDeliveryARN
			}
			policyJSON, err := json.MarshalIndent(policy, "", " ")
			if err != nil {
				return diag.FromErr(err)
			}
			d.SetId(externalID)
			// surface d.Set failures instead of silently discarding them
			if err := d.Set("json", string(policyJSON)); err != nil {
				return diag.FromErr(err)
			}
			return nil
		},
		Schema: map[string]*schema.Schema{
			"databricks_account_id": {
				Type: schema.TypeString,
				// defaults to the Databricks production AWS account ID
				Default:  "414351767826",
				Optional: true,
			},
			"for_log_delivery": {
				Type:        schema.TypeBool,
				Description: "Grant AssumeRole to Databricks SaasUsageDeliveryRole instead of root account",
				Optional:    true,
				Default:     false,
			},
			"external_id": {
				Type:     schema.TypeString,
				Required: true,
			},
			"json": {
				Type:     schema.TypeString,
				Computed: true,
				ForceNew: true,
			},
		},
	}
}
21 changes: 21 additions & 0 deletions aws/data_aws_assume_role_policy_test.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@
package aws

import (
"testing"

"github.com/databrickslabs/terraform-provider-databricks/qa"
"github.com/stretchr/testify/assert"
)

// TestDataAwsAssumeRolePolicy checks that reading the data source with a
// fixed external ID succeeds and yields a policy JSON of the expected,
// stable length.
func TestDataAwsAssumeRolePolicy(t *testing.T) {
	fixture := qa.ResourceFixture{
		Read:        true,
		Resource:    DataAwsAssumeRolePolicy(),
		NonWritable: true,
		ID:          ".",
		HCL:         `external_id = "abc"`,
	}
	d, err := fixture.Apply(t)
	assert.NoError(t, err)
	policyJSON := d.Get("json")
	assert.Lenf(t, policyJSON, 299, "Strange length for policy: %s", policyJSON)
}
78 changes: 78 additions & 0 deletions aws/data_aws_bucket_policy.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,78 @@
package aws

import (
"context"
"encoding/json"
"fmt"
"regexp"

"github.com/hashicorp/terraform-plugin-sdk/v2/diag"
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation"
)

// DataAwsBucketPolicy defines a data source that renders the JSON of an S3
// bucket policy granting the Databricks account (or an explicitly supplied
// role) read/write access to the given bucket. The rendered document is
// exposed through the computed "json" attribute.
func DataAwsBucketPolicy() *schema.Resource {
	return &schema.Resource{
		ReadContext: func(ctx context.Context, d *schema.ResourceData, m interface{}) diag.Diagnostics {
			bucket := d.Get("bucket").(string)
			policy := awsIamPolicy{
				Version: "2012-10-17",
				Statements: []*awsIamPolicyStatement{
					{
						Effect: "Allow",
						Actions: []string{
							"s3:GetObject",
							"s3:GetObjectVersion",
							"s3:PutObject",
							"s3:DeleteObject",
							"s3:ListBucket",
							"s3:GetBucketLocation",
						},
						Resources: []string{
							// both the objects within the bucket and the
							// bucket itself must be listed as resources
							fmt.Sprintf("arn:aws:s3:::%s/*", bucket),
							fmt.Sprintf("arn:aws:s3:::%s", bucket),
						},
						Principal: map[string]string{
							"AWS": fmt.Sprintf("arn:aws:iam::%s:root", d.Get("databricks_account_id").(string)),
						},
					},
				},
			}
			if v, ok := d.GetOk("full_access_role"); ok {
				// an explicit role replaces the account-root principal
				policy.Statements[0].Principal["AWS"] = v.(string)
			}
			policyJSON, err := json.MarshalIndent(policy, "", " ")
			if err != nil {
				return diag.FromErr(err)
			}
			d.SetId(bucket)
			// surface d.Set failures instead of silently discarding them
			if err := d.Set("json", string(policyJSON)); err != nil {
				return diag.FromErr(err)
			}
			return nil
		},
		Schema: map[string]*schema.Schema{
			"databricks_account_id": {
				Type: schema.TypeString,
				// defaults to the Databricks production AWS account ID
				Default:  "414351767826",
				Optional: true,
			},
			"full_access_role": {
				Type:     schema.TypeString,
				Optional: true,
			},
			"bucket": {
				Type:     schema.TypeString,
				Required: true,
				ValidateFunc: validation.StringMatch(
					regexp.MustCompile(`^[0-9a-zA-Z_-]+$`),
					"must contain only alphanumeric, underscore, and hyphen characters"),
			},
			"json": {
				Type:     schema.TypeString,
				Computed: true,
				ForceNew: true,
			},
		},
	}
}
39 changes: 39 additions & 0 deletions aws/data_aws_bucket_policy_test.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,39 @@
package aws

import (
"testing"

"github.com/databrickslabs/terraform-provider-databricks/qa"
"github.com/stretchr/testify/assert"
)

// TestDataAwsBucketPolicy checks that reading the data source with only the
// required bucket name succeeds and yields a policy JSON of the expected,
// stable length.
func TestDataAwsBucketPolicy(t *testing.T) {
	fixture := qa.ResourceFixture{
		Read:        true,
		Resource:    DataAwsBucketPolicy(),
		NonWritable: true,
		ID:          ".",
		HCL: `
		bucket = "abc"
		`,
	}
	d, err := fixture.Apply(t)
	assert.NoError(t, err)
	policyJSON := d.Get("json")
	assert.Lenf(t, policyJSON, 440, "Strange length for policy: %s", policyJSON)
}

// TestDataAwsBucketPolicy_FullAccessRole checks that supplying
// full_access_role substitutes the principal and yields a policy JSON of
// the expected, stable length.
func TestDataAwsBucketPolicy_FullAccessRole(t *testing.T) {
	fixture := qa.ResourceFixture{
		Read:        true,
		Resource:    DataAwsBucketPolicy(),
		NonWritable: true,
		ID:          ".",
		HCL: `
		bucket = "abc"
		full_access_role = "bcd"
		`,
	}
	d, err := fixture.Apply(t)
	assert.NoError(t, err)
	policyJSON := d.Get("json")
	assert.Lenf(t, policyJSON, 413, "Strange length for policy: %s", policyJSON)
}
Loading

0 comments on commit 1cbdf62

Please sign in to comment.