diff --git a/.github/workflows/push.yml b/.github/workflows/push.yml index 4f7c09c16c..18189036c9 100644 --- a/.github/workflows/push.yml +++ b/.github/workflows/push.yml @@ -3,6 +3,7 @@ name: build on: pull_request: types: [opened, synchronize] + paths-ignore: ['**.md'] push: branches: [master] diff --git a/CHANGELOG.md b/CHANGELOG.md index a7c7e99056..ae61d47b2d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,24 @@ # Version changelog +## 0.5.9 + +* Added warning section for debug mode ([#1325](https://github.com/databrickslabs/terraform-provider-databricks/pull/1325)). +* Added ability to specify tags for `databricks_job` ([#1337](https://github.com/databrickslabs/terraform-provider-databricks/pull/1337)). +* Upgraded AWS provider for AWS guides. Added examples for account-level identities ([#1332](https://github.com/databrickslabs/terraform-provider-databricks/pull/1332)). +* Updated docs to use `application_id` as privilege for `databricks_service_principal` ([#1336](https://github.com/databrickslabs/terraform-provider-databricks/pull/1336)). +* Added `databricks_service_principal_role` resource ([#1340](https://github.com/databrickslabs/terraform-provider-databricks/pull/1340)). +* Fixed itegration testing image ([#1342](https://github.com/databrickslabs/terraform-provider-databricks/pull/1342), [#1343](https://github.com/databrickslabs/terraform-provider-databricks/pull/1343)). +* Added `skip_validation` for `databricks_external_location` ([#1330](https://github.com/databrickslabs/terraform-provider-databricks/pull/1330)). +* Added `alert_on_last_attempt` to `databricks_job` ([#1341](https://github.com/databrickslabs/terraform-provider-databricks/pull/1341)). +* Skip `make test` on doc-only changes ([#1339](https://github.com/databrickslabs/terraform-provider-databricks/pull/1339)). +* Improve common package test coverage ([#1344](https://github.com/databrickslabs/terraform-provider-databricks/pull/1344)). 
+* Re-create purged cluster for `databricks_mount` for AWS S3 ([#1345](https://github.com/databrickslabs/terraform-provider-databricks/pull/1345)). + +Updated dependency versions: + +* Bump google.golang.org/api from 0.79.0 to 0.80.0 +* Bump github.com/Azure/go-autorest/autorest/adal from 0.9.19 to 0.9.20 + ## 0.5.8 * Update `aws_iam_policy_document` in `databricks_mws_customer_managed_keys` docs to restrict KMS policy to caller AWS account ([#1309](https://github.com/databrickslabs/terraform-provider-databricks/pull/1309)). diff --git a/README.md b/README.md index c71a89b491..f77464e10a 100644 --- a/README.md +++ b/README.md @@ -82,7 +82,7 @@ terraform { required_providers { databricks = { source = "databrickslabs/databricks" - version = "0.5.8" + version = "0.5.9" } } } diff --git a/aws/resource_service_principal_role.go b/aws/resource_service_principal_role.go new file mode 100644 index 0000000000..a77542537a --- /dev/null +++ b/aws/resource_service_principal_role.go @@ -0,0 +1,33 @@ +package aws + +import ( + "context" + "fmt" + + "github.com/databrickslabs/terraform-provider-databricks/common" + "github.com/databrickslabs/terraform-provider-databricks/scim" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" +) + +// ResourceServicePrincipalRole binds service principal and instance profile +func ResourceServicePrincipalRole() *schema.Resource { + r := common.NewPairID("service_principal_id", "role").BindResource(common.BindResource{ + CreateContext: func(ctx context.Context, servicePrincipalID, role string, c *common.DatabricksClient) error { + return scim.NewServicePrincipalsAPI(ctx, c).Patch(servicePrincipalID, scim.PatchRequest("add", "roles", role)) + }, + ReadContext: func(ctx context.Context, servicePrincipalID, roleARN string, c *common.DatabricksClient) error { + servicePrincipal, err := scim.NewServicePrincipalsAPI(ctx, c).Read(servicePrincipalID) + hasRole := scim.ComplexValues(servicePrincipal.Roles).HasValue(roleARN) + if err == nil && 
!hasRole { + return common.NotFound("Service Principal has no role") + } + return err + }, + DeleteContext: func(ctx context.Context, servicePrincipalID, roleARN string, c *common.DatabricksClient) error { + return scim.NewServicePrincipalsAPI(ctx, c).Patch(servicePrincipalID, scim.PatchRequest( + "remove", fmt.Sprintf(`roles[value eq "%s"]`, roleARN), "")) + }, + }) + return r +} diff --git a/aws/resource_service_principal_role_test.go b/aws/resource_service_principal_role_test.go new file mode 100644 index 0000000000..260ee0e3b3 --- /dev/null +++ b/aws/resource_service_principal_role_test.go @@ -0,0 +1,135 @@ +package aws + +import ( + "testing" + + "github.com/databrickslabs/terraform-provider-databricks/common" + + "github.com/databrickslabs/terraform-provider-databricks/scim" + + "github.com/databrickslabs/terraform-provider-databricks/qa" +) + +func TestResourceServicePrincipalRoleCreate(t *testing.T) { + qa.ResourceFixture{ + Fixtures: []qa.HTTPFixture{ + { + Method: "PATCH", + Resource: "/api/2.0/preview/scim/v2/ServicePrincipals/abc", + ExpectedRequest: scim.PatchRequest( + "add", + "roles", + "arn:aws:iam::999999999999:instance-profile/my-fake-instance-profile"), + Response: scim.User{ + ID: "abc", + }, + }, + { + Method: "GET", + Resource: "/api/2.0/preview/scim/v2/ServicePrincipals/abc", + Response: scim.User{ + Schemas: []scim.URN{scim.ServicePrincipalSchema}, + DisplayName: "ABC SP", + Roles: []scim.ComplexValue{ + { + Value: "arn:aws:iam::999999999999:instance-profile/my-fake-instance-profile", + }, + }, + ID: "abc", + }, + }, + }, + Resource: ResourceServicePrincipalRole(), + State: map[string]interface{}{ + "service_principal_id": "abc", + "role": "arn:aws:iam::999999999999:instance-profile/my-fake-instance-profile", + }, + Create: true, + }.ApplyAndExpectData(t, map[string]interface{}{"id": "abc|arn:aws:iam::999999999999:instance-profile/my-fake-instance-profile"}) +} + +func TestResourceServicePrincipalRoleCreate_Error(t *testing.T) { + 
qa.ResourceFixture{ + Fixtures: []qa.HTTPFixture{ + { + Method: "PATCH", + Resource: "/api/2.0/preview/scim/v2/ServicePrincipals/abc", + Response: common.APIErrorBody{ + ErrorCode: "INVALID_REQUEST", + Message: "Internal error happened", + }, + Status: 400, + }, + }, + Resource: ResourceServicePrincipalRole(), + State: map[string]interface{}{ + "service_principal_id": "abc", + "role": "arn:aws:iam::999999999999:instance-profile/my-fake-instance-profile", + }, + Create: true, + }.ExpectError(t, "Internal error happened") +} + +func TestResourceServicePrincipalRoleRead(t *testing.T) { + qa.ResourceFixture{ + Fixtures: []qa.HTTPFixture{ + { + Method: "GET", + Resource: "/api/2.0/preview/scim/v2/ServicePrincipals/abc", + Response: scim.User{ + Schemas: []scim.URN{scim.ServicePrincipalSchema}, + DisplayName: "ABC SP", + Roles: []scim.ComplexValue{ + { + Value: "arn:aws:iam::999999999999:instance-profile/my-fake-instance-profile", + }, + }, + ID: "abc", + }, + }, + }, + Resource: ResourceServicePrincipalRole(), + Read: true, + ID: "abc|arn:aws:iam::999999999999:instance-profile/my-fake-instance-profile", + }.ApplyAndExpectData(t, map[string]interface{}{"id": "abc|arn:aws:iam::999999999999:instance-profile/my-fake-instance-profile"}) +} + +func TestResourceServicePrincipalRoleRead_NoRole(t *testing.T) { + qa.ResourceFixture{ + Fixtures: []qa.HTTPFixture{ + { + Method: "GET", + Resource: "/api/2.0/preview/scim/v2/ServicePrincipals/abc", + Response: scim.User{ + Schemas: []scim.URN{scim.ServicePrincipalSchema}, + DisplayName: "ABC SP", + ID: "abc", + }, + }, + }, + Resource: ResourceServicePrincipalRole(), + Read: true, + Removed: true, + ID: "abc|arn:aws:iam::999999999999:instance-profile/my-fake-instance-profile", + }.ApplyNoError(t) +} + +func TestResourceServicePrincipalRoleRead_NotFound(t *testing.T) { + qa.ResourceFixture{ + Fixtures: []qa.HTTPFixture{ + { + Method: "GET", + Resource: "/api/2.0/preview/scim/v2/ServicePrincipals/abc", + Response: common.APIErrorBody{ + 
ErrorCode: "NOT_FOUND", + Message: "Item not found", + }, + Status: 404, + }, + }, + Resource: ResourceServicePrincipalRole(), + Read: true, + Removed: true, + ID: "abc|arn:aws:iam::999999999999:instance-profile/my-fake-instance-profile", + }.ApplyNoError(t) +} diff --git a/catalog/resource_external_location.go b/catalog/resource_external_location.go index 28b493e6e4..930f375107 100644 --- a/catalog/resource_external_location.go +++ b/catalog/resource_external_location.go @@ -22,6 +22,7 @@ type ExternalLocationInfo struct { URL string `json:"url"` CredentialName string `json:"credential_name"` Comment string `json:"comment,omitempty"` + SkipValidation bool `json:"skip_validation,omitempty"` Owner string `json:"owner,omitempty" tf:"computed"` MetastoreID string `json:"metastore_id,omitempty" tf:"computed"` } @@ -74,6 +75,7 @@ func ResourceExternalLocation() *schema.Resource { Name: d.Id(), URL: el.URL, CredentialName: el.CredentialName, + SkipValidation: el.SkipValidation, Comment: el.Comment, Owner: el.Owner, }) diff --git a/commands/commands.go b/commands/commands.go index 7eba88d155..635c81f5eb 100644 --- a/commands/commands.go +++ b/commands/commands.go @@ -8,7 +8,6 @@ import ( "github.com/databrickslabs/terraform-provider-databricks/clusters" "github.com/databrickslabs/terraform-provider-databricks/common" - "github.com/databrickslabs/terraform-provider-databricks/internal" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" ) @@ -52,7 +51,7 @@ func (a CommandsAPI) Execute(clusterID, language, commandStr string) common.Comm Summary: fmt.Sprintf("Cluster %s has to be running or resizing, but is %s", clusterID, cluster.State), } } - commandStr = internal.TrimLeadingWhitespace(commandStr) + commandStr = TrimLeadingWhitespace(commandStr) log.Printf("[INFO] Executing %s command on %s:\n%s", language, clusterID, commandStr) context, err := a.createContext(language, clusterID) if err != nil { diff --git a/internal/utils.go b/commands/leading_whitespace.go 
similarity index 81% rename from internal/utils.go rename to commands/leading_whitespace.go index 5c89ed7099..ae93ef3d7c 100644 --- a/internal/utils.go +++ b/commands/leading_whitespace.go @@ -1,10 +1,11 @@ -package internal +package commands import ( "strings" ) -// TrimLeadingWhitespace removes leading whitespace +// TrimLeadingWhitespace removes leading whitespace, so that Python code blocks +// that are embedded into Go code still could be interpreted properly. func TrimLeadingWhitespace(commandStr string) (newCommand string) { lines := strings.Split(strings.ReplaceAll(commandStr, "\t", " "), "\n") leadingWhitespace := 1<<31 - 1 diff --git a/internal/utils_test.go b/commands/leading_whitespace_test.go similarity index 92% rename from internal/utils_test.go rename to commands/leading_whitespace_test.go index d9b436922b..b627871c78 100644 --- a/internal/utils_test.go +++ b/commands/leading_whitespace_test.go @@ -1,4 +1,4 @@ -package internal +package commands import ( "testing" diff --git a/common/client.go b/common/client.go index ab59843a23..86b4b71ac7 100644 --- a/common/client.go +++ b/common/client.go @@ -320,6 +320,8 @@ func (c *DatabricksClient) niceAuthError(message string) error { } info = ". " + strings.Join(infos, ". ") } + info = strings.TrimSuffix(info, ".") + message = strings.TrimSuffix(message, ".") docUrl := "https://registry.terraform.io/providers/databrickslabs/databricks/latest/docs#authentication" return fmt.Errorf("%s%s. 
Please check %s for details", message, info, docUrl) } diff --git a/common/http.go b/common/http.go index 74adac0778..e704adca23 100644 --- a/common/http.go +++ b/common/http.go @@ -6,7 +6,7 @@ import ( "encoding/json" "errors" "fmt" - "io/ioutil" + "io" "log" "net/http" "net/url" @@ -179,7 +179,7 @@ func (c *DatabricksClient) commonErrorClarity(resp *http.Response) *APIError { } func (c *DatabricksClient) parseError(resp *http.Response) APIError { - body, err := ioutil.ReadAll(resp.Body) + body, err := io.ReadAll(resp.Body) if err != nil { return APIError{ Message: err.Error(), @@ -345,16 +345,21 @@ func (c *DatabricksClient) completeUrl(r *http.Request) error { return nil } +// scimPathVisitorFactory is a separate method for the sake of unit tests +func (c *DatabricksClient) scimVisitor(r *http.Request) error { + r.Header.Set("Content-Type", "application/scim+json; charset=utf-8") + if c.isAccountsClient() && c.AccountID != "" { + // until `/preview` is there for workspace scim, + // `/api/2.0` is added by completeUrl visitor + r.URL.Path = strings.ReplaceAll(r.URL.Path, "/api/2.0/preview", + fmt.Sprintf("/api/2.0/accounts/%s", c.AccountID)) + } + return nil +} + // Scim sets SCIM headers func (c *DatabricksClient) Scim(ctx context.Context, method, path string, request interface{}, response interface{}) error { - body, err := c.authenticatedQuery(ctx, method, path, request, c.completeUrl, func(r *http.Request) error { - r.Header.Set("Content-Type", "application/scim+json; charset=utf-8") - if c.isAccountsClient() && c.AccountID != "" { - // until `/preview` is there for workspace scim - r.URL.Path = strings.ReplaceAll(path, "/preview", fmt.Sprintf("/api/2.0/accounts/%s", c.AccountID)) - } - return nil - }) + body, err := c.authenticatedQuery(ctx, method, path, request, c.completeUrl, c.scimVisitor) if err != nil { return err } @@ -402,7 +407,9 @@ func (c *DatabricksClient) redactedDump(body []byte) (res string) { if len(body) == 0 { return } - + if body[0] != '{' 
{ + return fmt.Sprintf("[non-JSON document of %d bytes]", len(body)) + } var requestMap map[string]interface{} err := json.Unmarshal(body, &requestMap) if err != nil { @@ -465,21 +472,21 @@ func (c *DatabricksClient) genericQuery(ctx context.Context, method, requestURL return nil, fmt.Errorf("DatabricksClient is not configured") } if err = c.rateLimiter.Wait(ctx); err != nil { - return nil, err + return nil, fmt.Errorf("rate limited: %w", err) } - requestBody, err := makeRequestBody(method, &requestURL, data, true) + requestBody, err := makeRequestBody(method, &requestURL, data) if err != nil { - return nil, err + return nil, fmt.Errorf("request marshal: %w", err) } request, err := http.NewRequestWithContext(ctx, method, requestURL, bytes.NewBuffer(requestBody)) if err != nil { - return nil, err + return nil, fmt.Errorf("new request: %w", err) } request.Header.Set("User-Agent", c.userAgent(ctx)) for _, requestVisitor := range visitors { err = requestVisitor(request) if err != nil { - return nil, err + return nil, fmt.Errorf("failed visitor: %w", err) } } headers := c.createDebugHeaders(request.Header, c.Host) @@ -488,78 +495,93 @@ func (c *DatabricksClient) genericQuery(ctx context.Context, method, requestURL r, err := retryablehttp.FromRequest(request) if err != nil { - return nil, err + return nil, err // no error invariants possible because of `makeRequestBody` } resp, err := c.httpClient.Do(r) // retryablehttp library now returns only wrapped errors var ae APIError if errors.As(err, &ae) { + // don't re-wrap, as upper layers may depend on handling common.APIError return nil, ae } if err != nil { - return nil, err + // i don't even know which errors in the real world would end up here. + // `retryablehttp` package nicely wraps _everything_ to `url.Error`. 
+ return nil, fmt.Errorf("failed request: %w", err) } defer func() { if ferr := resp.Body.Close(); ferr != nil { - err = ferr + err = fmt.Errorf("failed to close: %w", ferr) } }() - body, err = ioutil.ReadAll(resp.Body) + body, err = io.ReadAll(resp.Body) if err != nil { - return nil, err + return nil, fmt.Errorf("response body: %w", err) } headers = c.createDebugHeaders(resp.Header, "") log.Printf("[DEBUG] %s %s %s <- %s %s", resp.Status, headers, c.redactedDump(body), method, strings.ReplaceAll(request.URL.Path, "\n", "")) return body, nil } -func makeRequestBody(method string, requestURL *string, data interface{}, marshalJSON bool) ([]byte, error) { +func makeQueryString(data interface{}) (string, error) { + inputVal := reflect.ValueOf(data) + inputType := reflect.TypeOf(data) + if inputType.Kind() == reflect.Map { + s := []string{} + keys := inputVal.MapKeys() + // sort map keys by their string repr, so that tests can be deterministic + sort.Slice(keys, func(i, j int) bool { + return keys[i].String() < keys[j].String() + }) + for _, k := range keys { + v := inputVal.MapIndex(k) + if v.IsZero() { + continue + } + s = append(s, fmt.Sprintf("%s=%s", + strings.Replace(url.QueryEscape(fmt.Sprintf("%v", k.Interface())), "+", "%20", -1), + strings.Replace(url.QueryEscape(fmt.Sprintf("%v", v.Interface())), "+", "%20", -1))) + } + return "?" + strings.Join(s, "&"), nil + } + if inputType.Kind() == reflect.Struct { + params, err := query.Values(data) + if err != nil { + return "", fmt.Errorf("cannot create query string: %w", err) + } + return "?" 
+ params.Encode(), nil + } + return "", fmt.Errorf("unsupported query string data: %#v", data) +} + +func makeRequestBody(method string, requestURL *string, data interface{}) ([]byte, error) { var requestBody []byte if data == nil && (method == "DELETE" || method == "GET") { return requestBody, nil } if method == "GET" { - inputVal := reflect.ValueOf(data) - inputType := reflect.TypeOf(data) - switch inputType.Kind() { - case reflect.Map: - s := []string{} - keys := inputVal.MapKeys() - // sort map keys by their string repr, so that tests can be deterministic - sort.Slice(keys, func(i, j int) bool { - return keys[i].String() < keys[j].String() - }) - for _, k := range keys { - v := inputVal.MapIndex(k) - if v.IsZero() { - continue - } - s = append(s, fmt.Sprintf("%s=%s", - strings.Replace(url.QueryEscape(fmt.Sprintf("%v", k.Interface())), "+", "%20", -1), - strings.Replace(url.QueryEscape(fmt.Sprintf("%v", v.Interface())), "+", "%20", -1))) - } - *requestURL += "?" + strings.Join(s, "&") - case reflect.Struct: - params, err := query.Values(data) - if err != nil { - return nil, err - } - *requestURL += "?" 
+ params.Encode() - default: - return requestBody, fmt.Errorf("unsupported request data: %#v", data) + qs, err := makeQueryString(data) + if err != nil { + return nil, err } - } else { - if marshalJSON { - bodyBytes, err := json.MarshalIndent(data, "", " ") - if err != nil { - return nil, err - } - requestBody = bodyBytes - } else { - requestBody = []byte(data.(string)) + *requestURL += qs + return requestBody, nil + } + if reader, ok := data.(io.Reader); ok { + raw, err := io.ReadAll(reader) + if err != nil { + return nil, fmt.Errorf("failed to read from reader: %w", err) } + return raw, nil + } + if str, ok := data.(string); ok { + return []byte(str), nil + } + bodyBytes, err := json.MarshalIndent(data, "", " ") + if err != nil { + return nil, fmt.Errorf("request marshal failure: %w", err) } - return requestBody, nil + return bodyBytes, nil } func onlyNBytes(j string, numBytes int) string { diff --git a/common/http_test.go b/common/http_test.go index c0a04e34a8..70e58c65e1 100644 --- a/common/http_test.go +++ b/common/http_test.go @@ -11,6 +11,7 @@ import ( "strings" "testing" + "github.com/hashicorp/go-retryablehttp" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) @@ -77,8 +78,11 @@ func (errReader) Read(p []byte) (n int, err error) { return 0, fmt.Errorf("test error") } -func (errReader) Close() error { - return fmt.Errorf("test error") +func (i errReader) Close() error { + if int(i) > 100 { + return fmt.Errorf("test error") + } + return nil } func TestParseError_IO(t *testing.T) { @@ -163,6 +167,7 @@ func TestParseError_SCIM(t *testing.T) { "detail": "Detailed SCIM message", "status": "MALFUNCTION", "string_value": "sensitive", + "token_value": "sensitive", "content": "sensitive" }`))), }) @@ -334,19 +339,123 @@ func TestScim(t *testing.T) { require.NoError(t, err) } +func TestScimFailingQuery(t *testing.T) { + err := (&DatabricksClient{ + Host: "https://localhost", + Token: "..", + }).Scim(context.Background(), "GET", "/foo", nil, 
nil) + assert.EqualError(t, err, "DatabricksClient is not configured") +} + +func TestScimVisitorForAccounts(t *testing.T) { + request := &http.Request{ + Header: http.Header{}, + URL: &url.URL{ + Path: "/api/2.0/preview/scim/v2/Users/abc", + }, + } + err := (&DatabricksClient{ + Host: "https://accounts.everywhere", + AccountID: "uuid", + }).scimVisitor(request) + assert.NoError(t, err) + assert.Equal(t, "application/scim+json; charset=utf-8", request.Header.Get("Content-Type")) + assert.Equal(t, "/api/2.0/accounts/uuid/scim/v2/Users/abc", request.URL.Path) +} + func TestMakeRequestBody(t *testing.T) { type x struct { Scope string `json:"scope" url:"scope"` } requestURL := "/a/b/c" - _, err := makeRequestBody("GET", &requestURL, x{"test"}, true) + _, err := makeRequestBody("GET", &requestURL, x{"test"}) require.NoError(t, err) assert.Equal(t, "/a/b/c?scope=test", requestURL) - body, _ := makeRequestBody("POST", &requestURL, "abc", false) + body, _ := makeRequestBody("POST", &requestURL, "abc") assert.Equal(t, []byte("abc"), body) } +func TestMakeRequestBodyFromReader(t *testing.T) { + requestURL := "/a/b/c" + body, err := makeRequestBody("PUT", &requestURL, strings.NewReader("abc")) + require.NoError(t, err) + assert.Equal(t, []byte("abc"), body) +} + +func TestMakeRequestBodyReaderError(t *testing.T) { + requestURL := "/a/b/c" + _, err := makeRequestBody("POST", &requestURL, errReader(1)) + assert.EqualError(t, err, "failed to read from reader: test error") +} + +func TestMakeRequestBodyJsonError(t *testing.T) { + requestURL := "/a/b/c" + type x struct { + Foo chan string `json:"foo"` + } + _, err := makeRequestBody("POST", &requestURL, x{make(chan string)}) + assert.EqualError(t, err, "request marshal failure: json: unsupported type: chan string") +} + +type failingUrlEncode string + +func (fue failingUrlEncode) EncodeValues(key string, v *url.Values) error { + return fmt.Errorf(string(fue)) +} + +func TestMakeRequestBodyQueryFailingEncode(t *testing.T) { + 
requestURL := "/a/b/c" + type x struct { + Foo failingUrlEncode `url:"foo"` + } + _, err := makeRequestBody("GET", &requestURL, x{failingUrlEncode("always failing")}) + assert.EqualError(t, err, "cannot create query string: always failing") +} + +func TestMakeRequestBodyQueryUnsupported(t *testing.T) { + requestURL := "/a/b/c" + _, err := makeRequestBody("GET", &requestURL, true) + assert.EqualError(t, err, "unsupported query string data: true") +} + +func TestReaderBodyIsNotDumped(t *testing.T) { + server := httptest.NewServer(http.HandlerFunc( + func(rw http.ResponseWriter, req *http.Request) { + raw, err := ioutil.ReadAll(req.Body) + assert.NoError(t, err) + assert.Equal(t, "abc", string(raw)) + rw.WriteHeader(200) + })) + defer server.Close() + client := &DatabricksClient{ + Host: server.URL + "/", + Token: "..", + InsecureSkipVerify: true, + DebugHeaders: true, + } + err := client.Configure() + assert.NoError(t, err) + ctx := context.Background() + err = client.Post(ctx, "/workspace/import-file", strings.NewReader("abc"), nil) + assert.NoError(t, err) +} + +func TestRedactedDumpMalformedJsonReturnsEmptyString(t *testing.T) { + client := &DatabricksClient{} + res := client.redactedDump([]byte("{..}")) + assert.Equal(t, "", res) +} + +func TestRedactedDumpOverridesMaxBytes(t *testing.T) { + client := &DatabricksClient{ + DebugTruncateBytes: 1300, + } + res := client.redactedDump([]byte(`{"foo":"` + strings.Repeat("x", 1500) + `"}`)) + assert.Len(t, res, 1319) + assert.True(t, strings.HasSuffix(res, "... 
(35 more bytes)")) +} + func TestMakeRequestBodyForMap(t *testing.T) { requestURL := "/a" _, err := makeRequestBody("GET", &requestURL, map[string]int{ @@ -358,7 +467,7 @@ func TestMakeRequestBodyForMap(t *testing.T) { "c": 5, "b": 6, "d": 7, - }, true) + }) require.NoError(t, err) assert.Equal(t, "/a?a=2&b=6&c=5&d=7&e=1&f=3&g=4", requestURL) } @@ -446,3 +555,90 @@ func TestClient_HandleErrors(t *testing.T) { }) } } + +func TestGenericQueryNotConfigured(t *testing.T) { + _, err := (&DatabricksClient{}).genericQuery(context.Background(), "GET", "/foo", true) + assert.EqualError(t, err, "DatabricksClient is not configured") +} + +func TestGenericQueryStoppedContext(t *testing.T) { + ctx, cancel := context.WithCancel(context.Background()) + cancel() + client := &DatabricksClient{Host: "https://localhost", Token: ".."} + err := client.Configure() + assert.NoError(t, err) + _, err = client.genericQuery(ctx, "GET", "/foo", true) + assert.EqualError(t, err, "rate limited: context canceled") +} + +func TestGenericQueryMarshalError(t *testing.T) { + ctx := context.Background() + client := &DatabricksClient{Host: "https://localhost", Token: ".."} + err := client.Configure() + assert.NoError(t, err) + _, err = client.genericQuery(ctx, "POST", "/foo", errReader(1)) + assert.EqualError(t, err, "request marshal: failed to read from reader: test error") +} + +func TestGenericQueryInvalidMethod(t *testing.T) { + ctx := context.Background() + client := &DatabricksClient{Host: "https://localhost", Token: ".."} + err := client.Configure() + assert.NoError(t, err) + _, err = client.genericQuery(ctx, "😃", "/foo", strings.NewReader("abc")) + assert.EqualError(t, err, `new request: net/http: invalid method "😃"`) +} + +func TestGenericQueryFailingVisitor(t *testing.T) { + ctx := context.Background() + client := &DatabricksClient{Host: "https://localhost", Token: ".."} + err := client.Configure() + assert.NoError(t, err) + _, err = client.genericQuery(ctx, "POST", "/foo", 
strings.NewReader("abc"), + func(r *http.Request) error { + return fmt.Errorf("😃") + }) + assert.EqualError(t, err, `failed visitor: 😃`) +} + +func TestGenericQueryFailingRequest(t *testing.T) { + ctx := context.Background() + client := &DatabricksClient{Host: "https://localhost", Token: ".."} + err := client.Configure() + assert.NoError(t, err) + client.httpClient.RetryMax = 0 + client.httpClient.ErrorHandler = func(_ *http.Response, _ error, _ int) (*http.Response, error) { + return nil, fmt.Errorf("😃") + } + _, err = client.genericQuery(ctx, "PUT", "https://127.0.0.1/foo", strings.NewReader("abc")) + assert.EqualError(t, err, `failed request: 😃`) +} + +func TestGenericQueryFailingResponseBodyRead(t *testing.T) { + client, server := singleRequestServer(t, "GET", "/api/2.0/imaginary/endpoint", `{"a": "b"}`) + defer server.Close() + client.httpClient.RetryMax = 0 + client.httpClient.ResponseLogHook = func(_ retryablehttp.Logger, r *http.Response) { + r.Body = errReader(1) + } + ctx := context.Background() + _, err := client.genericQuery(ctx, "GET", fmt.Sprintf("%s/api/2.0/imaginary/endpoint", server.URL), nil) + assert.EqualError(t, err, "response body: test error") +} + +func TestGenericQueryFailingResponseBodyClose(t *testing.T) { + client, server := singleRequestServer(t, "GET", "/api/2.0/imaginary/endpoint", `{"a": "b"}`) + defer server.Close() + client.httpClient.RetryMax = 0 + client.httpClient.ResponseLogHook = func(_ retryablehttp.Logger, r *http.Response) { + r.Body = errReader(1000) + } + ctx := context.Background() + _, err := client.genericQuery(ctx, "GET", fmt.Sprintf("%s/api/2.0/imaginary/endpoint", server.URL), nil) + assert.EqualError(t, err, "failed to close: test error") +} + +func TestParseUnknownErrorStatusMalformed(t *testing.T) { + eb := (&DatabricksClient{}).parseUnknownError("malformed", nil, fmt.Errorf("test")) + assert.Equal(t, "UNKNOWN", eb.ErrorCode) +} diff --git a/common/version.go b/common/version.go index daa7f360e0..7325df867b 
100644 --- a/common/version.go +++ b/common/version.go @@ -3,7 +3,7 @@ package common import "context" var ( - version = "0.5.8" + version = "0.5.9" // ResourceName is resource name without databricks_ prefix ResourceName contextKey = 1 // Provider is the current instance of provider diff --git a/docs/guides/aws-e2-firewall-hub-and-spoke.md b/docs/guides/aws-e2-firewall-hub-and-spoke.md index 36fe1d4f50..43465da93a 100644 --- a/docs/guides/aws-e2-firewall-hub-and-spoke.md +++ b/docs/guides/aws-e2-firewall-hub-and-spoke.md @@ -94,7 +94,7 @@ terraform { } aws = { source = "hashicorp/aws" - version = "3.49.0" + version = "~> 4.15.0" } } } diff --git a/docs/guides/aws-e2-firewall-workspace.md b/docs/guides/aws-e2-firewall-workspace.md index 419ea2b000..5bfad3372d 100644 --- a/docs/guides/aws-e2-firewall-workspace.md +++ b/docs/guides/aws-e2-firewall-workspace.md @@ -92,7 +92,7 @@ terraform { } aws = { source = "hashicorp/aws" - version = "3.49.0" + version = "~> 4.15.0" } } } diff --git a/docs/guides/aws-private-link-workspace.md b/docs/guides/aws-private-link-workspace.md index 45cca6770a..6bd13b0d4d 100644 --- a/docs/guides/aws-private-link-workspace.md +++ b/docs/guides/aws-private-link-workspace.md @@ -48,7 +48,7 @@ terraform { } aws = { source = "hashicorp/aws" - version = "3.49.0" + version = "~> 4.15.0" } } } diff --git a/docs/guides/aws-workspace.md b/docs/guides/aws-workspace.md index c5d4f8c4e9..0f07fdaa92 100644 --- a/docs/guides/aws-workspace.md +++ b/docs/guides/aws-workspace.md @@ -57,7 +57,7 @@ terraform { } aws = { source = "hashicorp/aws" - version = "3.49.0" + version = "~> 4.15.0" } } } diff --git a/docs/guides/unity-catalog.md b/docs/guides/unity-catalog.md index 9f2c3bffc9..00cf580c5d 100644 --- a/docs/guides/unity-catalog.md +++ b/docs/guides/unity-catalog.md @@ -12,7 +12,7 @@ This guide uses the following variables in configurations: - `databricks_account_username`: The username an account-level admin uses to log in to 
[https://accounts.cloud.databricks.com](https://accounts.cloud.databricks.com). - `databricks_account_password`: The password for `databricks_account_username`. -- `databricks_account_id`: The numeric ID for your Databricks account. When you are logged in, it appears in the bottom left corner of the page. +- `databricks_account_id`: The numeric ID for your Databricks account. When you are logged in, it appears in the bottom left corner of the [Databricks Account Console](https://accounts.cloud.databricks.com/) or [Azure Databricks Account Console](https://accounts.azuredatabricks.net). - `databricks_workspace_url`: Value of `workspace_url` attribute from [databricks_mws_workspaces](../resources/mws_workspaces.md#attribute-reference) resource. This guide is provided as-is and you can use this guide as the basis for your custom Terraform module. diff --git a/docs/index.md b/docs/index.md index 2e83ff6487..baefffaf8f 100644 --- a/docs/index.md +++ b/docs/index.md @@ -342,7 +342,7 @@ terraform { required_providers { databricks = { source = "databrickslabs/databricks" - version = "0.5.8" + version = "0.5.9" } } } diff --git a/docs/resources/external_location.md b/docs/resources/external_location.md index 17419fdc94..31121eb29f 100644 --- a/docs/resources/external_location.md +++ b/docs/resources/external_location.md @@ -84,6 +84,7 @@ The following arguments are required: * `credential_name` - Name of the [databricks_storage_credential](storage_credential.md) to use with this External Location. * `owner` - (Optional) Username/groupname of External Location owner. Currently this field can only be changed after the resource is created. * `comment` - (Optional) User-supplied free-form text. 
+* `skip_validation` - (Optional) Suppress validation errors if any & force save the external location ## Import diff --git a/docs/resources/group.md b/docs/resources/group.md index ffd24ae2a4..4c751f3b17 100644 --- a/docs/resources/group.md +++ b/docs/resources/group.md @@ -3,7 +3,9 @@ subcategory: "Security" --- # databricks_group Resource -This resource allows you to manage [groups in Databricks Workspace](https://docs.databricks.com/administration-guide/users-groups/groups.html) or [Account Console](https://accounts.cloud.databricks.com/) (for AWS deployments). You can also [associate](group_member.md) Databricks users and [service principals](service_principal.md) to groups. This is useful if you are using an application to sync users & groups with SCIM API. +This resource allows you to manage [groups in Databricks Workspace](https://docs.databricks.com/administration-guide/users-groups/groups.html), [Databricks Account Console](https://accounts.cloud.databricks.com/) or [Azure Databricks Account Console](https://accounts.azuredatabricks.net). You can also [associate](group_member.md) Databricks users and [service principals](service_principal.md) to groups. This is useful if you are using an application to sync users & groups with SCIM API. 
+ +To create groups in the Databricks account, the provider must be configured with `host = "https://accounts.cloud.databricks.com"` on AWS deployments or `host = "https://accounts.azuredatabricks.net"` and authenticate using AAD tokens on Azure deployments Recommended to use along with Identity Provider SCIM provisioning to populate users into those groups: @@ -42,6 +44,39 @@ resource "databricks_group_member" "vip_member" { } ``` +Creating group in AWS Databricks account: +```hcl +// initialize provider at account-level +provider "databricks" { + alias = "mws" + host = "https://accounts.cloud.databricks.com" + account_id = "00000000-0000-0000-0000-000000000000" + username = var.databricks_account_username + password = var.databricks_account_password +} + +resource "databricks_group" "this" { + provider = databricks.mws + display_name = "Some Group" +} +``` + +Creating group in Azure Databricks account: +```hcl +// initialize provider at Azure account-level +provider "databricks" { + alias = "azure_account" + host = "https://accounts.azuredatabricks.net" + account_id = "00000000-0000-0000-0000-000000000000" + auth_type = "azure-cli" +} + +resource "databricks_group" "this" { + provider = databricks.azure_account + display_name = "Some Group" +} +``` + ## Argument Reference The following arguments are supported: diff --git a/docs/resources/group_member.md b/docs/resources/group_member.md index 39f5ef1518..08db78de6b 100644 --- a/docs/resources/group_member.md +++ b/docs/resources/group_member.md @@ -3,7 +3,9 @@ subcategory: "Security" --- # databricks_group_member Resource -This resource allows you to attach [users](user.md) and [groups](group.md) as group members. +This resource allows you to attach [users](user.md), [service_principal](service_principal.md), and [groups](group.md) as group members. 
+ +To attach members to groups in the Databricks account, the provider must be configured with `host = "https://accounts.cloud.databricks.com"` on AWS deployments or `host = "https://accounts.azuredatabricks.net"` and authenticate using AAD tokens on Azure deployments ## Example Usage diff --git a/docs/resources/job.md b/docs/resources/job.md index d4b0b772ef..dbc54943d0 100644 --- a/docs/resources/job.md +++ b/docs/resources/job.md @@ -124,6 +124,7 @@ The following arguments are required: * `max_concurrent_runs` - (Optional) (Integer) An optional maximum allowed number of concurrent runs of the job. Defaults to *1*. * `email_notifications` - (Optional) (List) An optional set of email addresses notified when runs of this job begin and complete and when this job is deleted. The default behavior is to not send any emails. This field is a block and is documented below. * `schedule` - (Optional) (List) An optional periodic schedule for this job. The default behavior is that the job runs when triggered by clicking Run Now in the Jobs UI or sending an API request to runNow. This field is a block and is documented below. +* `tags` - (Optional) (Map) An optional map of the tags associated with the job. Specified tags will be used as cluster tags for job clusters. ### job_cluster Configuration Block [Shared job cluster](https://docs.databricks.com/jobs.html#use-shared-job-clusters) specification. Allows multiple tasks in the same job run to reuse the cluster. diff --git a/docs/resources/service_principal.md b/docs/resources/service_principal.md index fad991d319..ea675b103e 100644 --- a/docs/resources/service_principal.md +++ b/docs/resources/service_principal.md @@ -3,7 +3,9 @@ subcategory: "Security" --- # databricks_service_principal Resource -Directly manage [Service Principals](https://docs.databricks.com/administration-guide/users-groups/service-principals.html) that could be added to [databricks_group](group.md) within workspace. 
+Directly manage [Service Principals](https://docs.databricks.com/administration-guide/users-groups/service-principals.html) that could be added to [databricks_group](group.md) in Databricks workspace or account. + +To create service principals in the Databricks account, the provider must be configured with `host = "https://accounts.cloud.databricks.com"` on AWS deployments or `host = "https://accounts.azuredatabricks.net"` and authenticate using AAD tokens on Azure deployments. ## Example Usage @@ -42,6 +44,39 @@ resource "databricks_service_principal" "sp" { } ``` +Creating service principal in AWS Databricks account: +```hcl +// initialize provider at account-level +provider "databricks" { + alias = "mws" + host = "https://accounts.cloud.databricks.com" + account_id = "00000000-0000-0000-0000-000000000000" + username = var.databricks_account_username + password = var.databricks_account_password +} + +resource "databricks_service_principal" "sp" { + provider = databricks.mws + display_name = "Automation-only SP" +} +``` + +Creating service principal in Azure Databricks account: +```hcl +// initialize provider at Azure account-level +provider "databricks" { + alias = "azure_account" + host = "https://accounts.azuredatabricks.net" + account_id = "00000000-0000-0000-0000-000000000000" + auth_type = "azure-cli" +} + +resource "databricks_service_principal" "sp" { + provider = databricks.azure_account + application_id = "00000000-0000-0000-0000-000000000000" +} +``` + ## Argument Reference -> `application_id` is required on Azure Databricks and is not allowed on other clouds. `display_name` is required on all clouds except Azure.
diff --git a/docs/resources/service_principal_role.md b/docs/resources/service_principal_role.md new file mode 100644 index 0000000000..369f70c6e4 --- /dev/null +++ b/docs/resources/service_principal_role.md @@ -0,0 +1,51 @@ +--- +subcategory: "Security" +--- +# databricks_service_principal_role Resource + +This resource allows you to attach a role or [databricks_instance_profile](instance_profile.md) (AWS) to a [databricks_service_principal](service_principal.md). + +## Example Usage + +Granting a service principal access to an instance profile + +```hcl +resource "databricks_instance_profile" "instance_profile" { + instance_profile_arn = "my_instance_profile_arn" +} + +resource "databricks_service_principal" "this" { + display_name = "My Service Principal" +} + +resource "databricks_service_principal_role" "my_service_principal_instance_profile" { + service_principal_id = databricks_service_principal.this.id + role = databricks_instance_profile.instance_profile.id +} +``` +## Argument Reference + +The following arguments are supported: + +* `service_principal_id` - (Required) This is the id of the [service principal](service_principal.md) resource. +* `role` - (Required) This is the id of the role or [instance profile](instance_profile.md) resource. + +## Attribute Reference + +In addition to all arguments above, the following attributes are exported: + +* `id` - The id in the format `<service_principal_id>|<role>`. + +## Import + +-> **Note** Importing this resource is not currently supported. + +## Related Resources + +The following resources are often used in the same context: + +* [End to end workspace management](../guides/workspace-management.md) guide. +* [databricks_user_role](user_instance_profile.md) to attach role or [databricks_instance_profile](instance_profile.md) (AWS) to [databricks_user](user.md). +* [databricks_group_instance_profile](group_instance_profile.md) to attach [databricks_instance_profile](instance_profile.md) (AWS) to [databricks_group](group.md).
+* [databricks_group_member](group_member.md) to attach [users](user.md) and [groups](group.md) as group members. +* [databricks_instance_profile](instance_profile.md) to manage AWS EC2 instance profiles that users can launch [databricks_cluster](cluster.md) and access data, like [databricks_mount](mount.md). diff --git a/docs/resources/sql_permissions.md b/docs/resources/sql_permissions.md index 8719bffb35..8993b5f49a 100644 --- a/docs/resources/sql_permissions.md +++ b/docs/resources/sql_permissions.md @@ -73,7 +73,7 @@ The following arguments are available to specify the data object you need to enf You must specify one or many `privilege_assignments` configuration blocks to declare `privileges` to a `principal`, which corresponds to `display_name` of [databricks_group](group.md#display_name) or [databricks_user](user.md#display_name). Terraform would ensure that only those principals and privileges defined in the resource are applied for the data object and would remove anything else. It would not remove any transitive privileges. `DENY` statements are intentionally not supported. Every `privilege_assignments` has the following required arguments: -* `principal` - `display_name` of [databricks_group](group.md#display_name) or [databricks_user](user.md#display_name). +* `principal` - `display_name` for a [databricks_group](group.md#display_name) or [databricks_user](user.md#display_name), `application_id` for a [databricks_service_principal](service_principal.md). * `privileges` - set of available privilege names in upper case. 
[Available](https://docs.databricks.com/security/access-control/table-acls/object-privileges.html) privilege names are: diff --git a/docs/resources/user.md b/docs/resources/user.md index 856c1d5ddd..5e600c5341 100644 --- a/docs/resources/user.md +++ b/docs/resources/user.md @@ -3,7 +3,9 @@ subcategory: "Security" --- # databricks_user Resource -This resource is used to [manage users](https://docs.databricks.com/administration-guide/users-groups/users.html), that could be added to [databricks_group](group.md) within the workspace. Upon user creation the user will receive a password reset email. You can also get information about caller identity using [databricks_current_user](../data-sources/current_user.md) data source. +This resource allows you to manage [users in Databricks Workspace](https://docs.databricks.com/administration-guide/users-groups/users.html), [Databricks Account Console](https://accounts.cloud.databricks.com/) or [Azure Databricks Account Console](https://accounts.azuredatabricks.net). You can also [associate](group_member.md) Databricks users to [databricks_group](group.md). Upon user creation the user will receive a password reset email. You can also get information about caller identity using [databricks_current_user](../data-sources/current_user.md) data source. 
+ +To create users in the Databricks account, the provider must be configured with `host = "https://accounts.cloud.databricks.com"` on AWS deployments or `host = "https://accounts.azuredatabricks.net"` and authenticate using AAD tokens on Azure deployments. ## Example Usage @@ -42,6 +44,41 @@ resource "databricks_user" "me" { } ``` +Creating user in AWS Databricks account: +```hcl +// initialize provider at account-level +provider "databricks" { + alias = "mws" + host = "https://accounts.cloud.databricks.com" + account_id = "00000000-0000-0000-0000-000000000000" + username = var.databricks_account_username + password = var.databricks_account_password +} + +resource "databricks_user" "account_user" { + provider = databricks.mws + user_name = "me@example.com" + display_name = "Example user" +} +``` + +Creating user in Azure Databricks account: +```hcl +// initialize provider at Azure account-level +provider "databricks" { + alias = "azure_account" + host = "https://accounts.azuredatabricks.net" + account_id = "00000000-0000-0000-0000-000000000000" + auth_type = "azure-cli" +} + +resource "databricks_user" "account_user" { + provider = databricks.azure_account + user_name = "me@example.com" + display_name = "Example user" +} +``` + ## Argument Reference The following arguments are available: diff --git a/exporter/importables_test.go b/exporter/importables_test.go index 83afabd617..7ece26c2e8 100644 --- a/exporter/importables_test.go +++ b/exporter/importables_test.go @@ -9,7 +9,7 @@ import ( "github.com/databrickslabs/terraform-provider-databricks/clusters" "github.com/databrickslabs/terraform-provider-databricks/common" - "github.com/databrickslabs/terraform-provider-databricks/internal" + "github.com/databrickslabs/terraform-provider-databricks/commands" "github.com/databrickslabs/terraform-provider-databricks/jobs" "github.com/databrickslabs/terraform-provider-databricks/permissions" "github.com/databrickslabs/terraform-provider-databricks/policies" @@ -598,7 +598,7 @@ func
TestNotebookGeneration(t *testing.T) { assert.NoError(t, err) ic.generateHclForResources(nil) - assert.Equal(t, internal.TrimLeadingWhitespace(` + assert.Equal(t, commands.TrimLeadingWhitespace(` resource "databricks_notebook" "firstsecond" { source = "${path.module}/notebooks/First/Second.py" path = "/First/Second" @@ -625,7 +625,7 @@ func TestGlobalInitScriptGen(t *testing.T) { }) ic.generateHclForResources(nil) - assert.Equal(t, internal.TrimLeadingWhitespace(` + assert.Equal(t, commands.TrimLeadingWhitespace(` resource "databricks_global_init_script" "new_importing_things" { source = "${path.module}/files/new_importing_things.sh" name = "New: Importing ^ Things" @@ -655,7 +655,7 @@ func TestSecretGen(t *testing.T) { }) ic.generateHclForResources(nil) - assert.Equal(t, internal.TrimLeadingWhitespace(` + assert.Equal(t, commands.TrimLeadingWhitespace(` resource "databricks_secret" "a_b" { string_value = var.string_value_a_b scope = "a" @@ -688,7 +688,7 @@ func TestDbfsFileGen(t *testing.T) { }) ic.generateHclForResources(nil) - assert.Equal(t, internal.TrimLeadingWhitespace(` + assert.Equal(t, commands.TrimLeadingWhitespace(` resource "databricks_dbfs_file" "_a_0cc175b9c0f1b6a831c399e269772661" { source = "${path.module}/files/_a_0cc175b9c0f1b6a831c399e269772661" path = "a" diff --git a/go.mod b/go.mod index 48cde3ae96..ef901029e7 100644 --- a/go.mod +++ b/go.mod @@ -4,7 +4,7 @@ go 1.16 require ( github.com/Azure/go-autorest/autorest v0.11.27 - github.com/Azure/go-autorest/autorest/adal v0.9.19 + github.com/Azure/go-autorest/autorest/adal v0.9.20 github.com/Azure/go-autorest/autorest/azure/auth v0.5.11 github.com/Azure/go-autorest/autorest/azure/cli v0.4.5 github.com/golang-jwt/jwt/v4 v4.4.1 @@ -20,6 +20,6 @@ require ( golang.org/x/mod v0.5.1 golang.org/x/oauth2 v0.0.0-20220411215720-9780585627b5 golang.org/x/time v0.0.0-20210723032227-1f47c861a9ac - google.golang.org/api v0.80.0 - gopkg.in/ini.v1 v1.66.4 + google.golang.org/api v0.81.0 + gopkg.in/ini.v1 v1.66.5 
) diff --git a/go.sum b/go.sum index 999e6063fa..435cd56cfd 100644 --- a/go.sum +++ b/go.sum @@ -58,8 +58,8 @@ github.com/Azure/go-autorest/autorest v0.11.24/go.mod h1:G6kyRlFnTuSbEYkQGawPfsC github.com/Azure/go-autorest/autorest v0.11.27 h1:F3R3q42aWytozkV8ihzcgMO4OA4cuqr3bNlsEuF6//A= github.com/Azure/go-autorest/autorest v0.11.27/go.mod h1:7l8ybrIdUmGqZMTD0sRtAr8NvbHjfofbf8RSP2q7w7U= github.com/Azure/go-autorest/autorest/adal v0.9.18/go.mod h1:XVVeme+LZwABT8K5Lc3hA4nAe8LDBVle26gTrguhhPQ= -github.com/Azure/go-autorest/autorest/adal v0.9.19 h1:5CHCGJjd9AedaD0iHhMTCRJPypw+hyCrtT9sLk/dD0w= -github.com/Azure/go-autorest/autorest/adal v0.9.19/go.mod h1:XVVeme+LZwABT8K5Lc3hA4nAe8LDBVle26gTrguhhPQ= +github.com/Azure/go-autorest/autorest/adal v0.9.20 h1:gJ3E98kMpFB1MFqQCvA1yFab8vthOeD4VlFRQULxahg= +github.com/Azure/go-autorest/autorest/adal v0.9.20/go.mod h1:XVVeme+LZwABT8K5Lc3hA4nAe8LDBVle26gTrguhhPQ= github.com/Azure/go-autorest/autorest/azure/auth v0.5.11 h1:P6bYXFoao05z5uhOQzbC3Qd8JqF3jUoocoTeIxkp2cA= github.com/Azure/go-autorest/autorest/azure/auth v0.5.11/go.mod h1:84w/uV8E37feW2NCJ08uT9VBfjfUHpgLVnG2InYD6cg= github.com/Azure/go-autorest/autorest/azure/cli v0.4.5 h1:0W/yGmFdTIT77fvdlGZ0LMISoLHFJ7Tx4U0yeB+uFs4= @@ -233,8 +233,9 @@ github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5m github.com/googleapis/gax-go/v2 v2.1.0/go.mod h1:Q3nei7sK6ybPYH7twZdmQpAd1MKb7pfu6SK+H1/DsU0= github.com/googleapis/gax-go/v2 v2.1.1/go.mod h1:hddJymUZASv3XPyGkUpKj8pPO47Rmb0eJc8R6ouapiM= github.com/googleapis/gax-go/v2 v2.2.0/go.mod h1:as02EH8zWkzwUoLbBaFeQ+arQaj/OthfcblKl4IGNaM= -github.com/googleapis/gax-go/v2 v2.3.0 h1:nRJtk3y8Fm770D42QV6T90ZnvFZyk7agSo3Q+Z9p3WI= github.com/googleapis/gax-go/v2 v2.3.0/go.mod h1:b8LNqSzNabLiUpXKkY7HAR5jr6bIT99EXz9pXxye9YM= +github.com/googleapis/gax-go/v2 v2.4.0 h1:dS9eYAjhrE2RjmzYw2XAPvcXfmcQLtFEQWn0CR82awk= +github.com/googleapis/gax-go/v2 v2.4.0/go.mod h1:XOTVJ59hdnfJLIP/dh8n5CGryZR2LxK9wbMD5+iXC6c= 
github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw= github.com/hashicorp/errwrap v1.0.0 h1:hLrqtEDnRye3+sgx6z4qVLNuviH3MR5aQ0ykNJa/UYA= github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= @@ -491,8 +492,9 @@ golang.org/x/net v0.0.0-20220127200216-cd36cc0744dd/go.mod h1:CfG3xpIq0wQ8r1q4Su golang.org/x/net v0.0.0-20220225172249-27dd8689420f/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= golang.org/x/net v0.0.0-20220325170049-de3da57026de/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= golang.org/x/net v0.0.0-20220412020605-290c469a71a5/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= -golang.org/x/net v0.0.0-20220425223048-2871e0cb64e4 h1:HVyaeDAYux4pnY+D/SiwmLOR36ewZ4iGQIIrtnuCjFA= golang.org/x/net v0.0.0-20220425223048-2871e0cb64e4/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= +golang.org/x/net v0.0.0-20220520000938-2e3eb7b945c2 h1:NWy5+hlRbC7HK+PmcXVUmW1IMyFce7to56IUvhUFm7Y= +golang.org/x/net v0.0.0-20220520000938-2e3eb7b945c2/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= @@ -524,6 +526,7 @@ golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJ golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20220513210516-0976fa681c29/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sys 
v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= @@ -585,8 +588,9 @@ golang.org/x/sys v0.0.0-20220209214540-3681064d5158/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20220227234510-4e6760a101f9/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220328115105-d36c6a25d886/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220412211240-33da011f77ad/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220503163025-988cb79eb6c6 h1:nonptSpoQ4vQjyraW20DXPAglgQfVnM9ZC6MmNLMR60= -golang.org/x/sys v0.0.0-20220503163025-988cb79eb6c6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220502124256-b6088ccd6cba/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a h1:dGzPydgVsqGcTRVwiLJ1jVbufYwmzD3LfVPLKsKg+0k= +golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211 h1:JGgROgKl9N8DuW20oFS5gxc+lE67/N3FcwmBPMe7ArY= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= @@ -662,6 +666,7 @@ golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8T golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20220411194840-2f41105eb62f/go.mod 
h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20220517211312-f3a8303e98df/go.mod h1:K8+ghG5WaK9qNqU5K3HdILfMLy1f3aNYFI/wnl100a8= google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE= google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M= google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= @@ -698,8 +703,9 @@ google.golang.org/api v0.70.0/go.mod h1:Bs4ZM2HGifEvXwd50TtW70ovgJffJYw2oRCOFU/S google.golang.org/api v0.71.0/go.mod h1:4PyU6e6JogV1f9eA4voyrTY2batOLdgZ5qZ5HOCc4j8= google.golang.org/api v0.74.0/go.mod h1:ZpfMZOVRMywNyvJFeqL9HRWBgAuRfSjJFpe9QtRRyDs= google.golang.org/api v0.75.0/go.mod h1:pU9QmyHLnzlpar1Mjt4IbapUCy8J+6HD6GeELN69ljA= -google.golang.org/api v0.80.0 h1:IQWaGVCYnsm4MO3hh+WtSXMzMzuyFx/fuR8qkN3A0Qo= -google.golang.org/api v0.80.0/go.mod h1:xY3nI94gbvBrE0J6NHXhxOmW97HG7Khjkku6AFB3Hyg= +google.golang.org/api v0.78.0/go.mod h1:1Sg78yoMLOhlQTeF+ARBoytAcH1NNyyl390YMy6rKmw= +google.golang.org/api v0.81.0 h1:o8WF5AvfidafWbFjsRyupxyEQJNUWxLZJCK5NXrxZZ8= +google.golang.org/api v0.81.0/go.mod h1:FA6Mb/bZxj706H2j+j2d6mHEEaHBmbbWnkfvmorOCko= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= @@ -781,8 +787,10 @@ google.golang.org/genproto v0.0.0-20220407144326-9054f6ed7bac/go.mod h1:8w6bsBMX google.golang.org/genproto v0.0.0-20220413183235-5e96e2839df9/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo= google.golang.org/genproto v0.0.0-20220414192740-2d67ff6cf2b4/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo= google.golang.org/genproto v0.0.0-20220421151946-72621c1f0bd3/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo= -google.golang.org/genproto v0.0.0-20220505152158-f39f71e6c8f3 
h1:q1kiSVscqoDeqTF27eQ2NnLLDmqF0I373qQNXYMy0fo= +google.golang.org/genproto v0.0.0-20220429170224-98d788798c3e/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo= google.golang.org/genproto v0.0.0-20220505152158-f39f71e6c8f3/go.mod h1:RAyBrSAP7Fh3Nc84ghnVLDPuV51xc9agzmm4Ph6i0Q4= +google.golang.org/genproto v0.0.0-20220519153652-3a47de7e79bd h1:e0TwkXOdbnH/1x5rc5MZ/VYyiZ4v+RdVfrGMqEwT68I= +google.golang.org/genproto v0.0.0-20220519153652-3a47de7e79bd/go.mod h1:RAyBrSAP7Fh3Nc84ghnVLDPuV51xc9agzmm4Ph6i0Q4= google.golang.org/grpc v1.8.0/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= @@ -812,8 +820,9 @@ google.golang.org/grpc v1.40.0/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9K google.golang.org/grpc v1.40.1/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9KAK34= google.golang.org/grpc v1.44.0/go.mod h1:k+4IHHFw41K8+bbowsex27ge2rCb65oeWqe4jJ590SU= google.golang.org/grpc v1.45.0/go.mod h1:lN7owxKUQEqMfSyQikvvk5tf/6zMPsrK+ONuO11+0rQ= -google.golang.org/grpc v1.46.0 h1:oCjezcn6g6A75TGoKYBPgKmVBLexhYLM6MebdrPApP8= google.golang.org/grpc v1.46.0/go.mod h1:vN9eftEi1UMyUsIF80+uQXhHjbXYbm0uXoFCACuMGWk= +google.golang.org/grpc v1.46.2 h1:u+MLGgVf7vRdjEYZ8wDFhAVNmhkbJ5hmrA1LMWK1CAQ= +google.golang.org/grpc v1.46.2/go.mod h1:vN9eftEi1UMyUsIF80+uQXhHjbXYbm0uXoFCACuMGWk= google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.1.0/go.mod h1:6Kw0yEErY5E/yWrBtf03jp27GLLJujG4z/JK95pnjjw= google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.2.0/go.mod h1:DNq5QpG7LJqD2AamLZ7zvKE0DEpVl2BSEVjFycAAjRY= google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= @@ -838,8 +847,8 @@ gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8 gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= 
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= -gopkg.in/ini.v1 v1.66.4 h1:SsAcf+mM7mRZo2nJNGt8mZCjG8ZRaNGMURJw7BsIST4= -gopkg.in/ini.v1 v1.66.4/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= +gopkg.in/ini.v1 v1.66.5 h1:zfiCO0p88Fj4f6NR6KR5WdGMQ02U8vlDnN6HuD2xv5o= +gopkg.in/ini.v1 v1.66.5/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= gopkg.in/warnings.v0 v0.1.2 h1:wFXVbFY8DY5/xOe1ECiWdKCzZlxgshcYVNkBHstARME= gopkg.in/warnings.v0 v0.1.2/go.mod h1:jksf8JmL6Qr/oQM2OXTHunEvvTAsrWBLb6OOjuVWRNI= gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= diff --git a/internal/acceptance/acceptance.go b/internal/acceptance/acceptance.go index 732c6a5c01..3c6abe5810 100644 --- a/internal/acceptance/acceptance.go +++ b/internal/acceptance/acceptance.go @@ -8,12 +8,9 @@ import ( "strings" "testing" + "github.com/databrickslabs/terraform-provider-databricks/commands" "github.com/databrickslabs/terraform-provider-databricks/common" - "github.com/databrickslabs/terraform-provider-databricks/internal" "github.com/databrickslabs/terraform-provider-databricks/internal/compute" - - "github.com/databrickslabs/terraform-provider-databricks/qa" - "github.com/databrickslabs/terraform-provider-databricks/provider" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" @@ -75,7 +72,7 @@ func Test(t *testing.T, steps []Step, otherVars ...map[string]string) { stepConfig := "" for _, s := range steps { if s.Template != "" { - stepConfig = qa.EnvironmentTemplate(t, s.Template, vars) + stepConfig = EnvironmentTemplate(t, s.Template, vars) } ts = append(ts, resource.TestStep{ Config: stepConfig, @@ -152,7 +149,7 @@ func AccTest(t *testing.T, tc resource.TestCase) { if s.Config != "" { t.Logf("Test %s (%s) step %d config is:\n%s", t.Name(), cloudEnv, i, 
- internal.TrimLeadingWhitespace(s.Config)) + commands.TrimLeadingWhitespace(s.Config)) } } } diff --git a/internal/acceptance/environment_template.go b/internal/acceptance/environment_template.go new file mode 100644 index 0000000000..ae504b8b8b --- /dev/null +++ b/internal/acceptance/environment_template.go @@ -0,0 +1,62 @@ +package acceptance + +import ( + "errors" + "fmt" + "os" + "regexp" + "strings" + "testing" + + "github.com/databrickslabs/terraform-provider-databricks/commands" + "github.com/databrickslabs/terraform-provider-databricks/qa" +) + +// For writing a unit test to intercept the errors (t.Fatalf literally ends the test in failure) +func environmentTemplate(t *testing.T, template string, otherVars ...map[string]string) (string, error) { + vars := map[string]string{ + "RANDOM": qa.RandomName("t"), + } + if len(otherVars) > 1 { + return "", errors.New("cannot have more than one custom variable map") + } + if len(otherVars) == 1 { + for k, v := range otherVars[0] { + vars[k] = v + } + } + // pullAll otherVars + missing := 0 + var varType, varName, value string + r := regexp.MustCompile(`{(env|var).([^{}]*)}`) + for _, variableMatch := range r.FindAllStringSubmatch(template, -1) { + value = "" + varType = variableMatch[1] + varName = variableMatch[2] + switch varType { + case "env": + value = os.Getenv(varName) + case "var": + value = vars[varName] + } + if value == "" { + t.Logf("Missing %s %s variable.", varType, varName) + missing++ + continue + } + template = strings.ReplaceAll(template, `{`+varType+`.`+varName+`}`, value) + } + if missing > 0 { + return "", fmt.Errorf("please set %d variables and restart", missing) + } + return commands.TrimLeadingWhitespace(template), nil +} + +// EnvironmentTemplate asserts existence and fills in {env.VAR} & {var.RANDOM} placeholders in template +func EnvironmentTemplate(t *testing.T, template string, otherVars ...map[string]string) string { + resp, err := environmentTemplate(t, template, otherVars...) 
+ if err != nil { + t.Skipf(err.Error()) + } + return resp +} \ No newline at end of file diff --git a/internal/acceptance/environment_template_test.go b/internal/acceptance/environment_template_test.go new file mode 100644 index 0000000000..73553032cc --- /dev/null +++ b/internal/acceptance/environment_template_test.go @@ -0,0 +1,44 @@ +package acceptance + +import ( + "fmt" + "os" + "testing" + + "github.com/databrickslabs/terraform-provider-databricks/common" + "github.com/databrickslabs/terraform-provider-databricks/qa" + "github.com/stretchr/testify/assert" +) + + +func TestEnvironmentTemplate(t *testing.T) { + defer common.CleanupEnvironment() + err := os.Setenv("USER", qa.RandomName()) + assert.NoError(t, err) + + res := EnvironmentTemplate(t, ` + resource "user" "me" { + name = "{env.USER}" + email = "{env.USER}+{var.RANDOM}@example.com" + }`) + assert.Equal(t, os.Getenv("USER"), qa.FirstKeyValue(t, res, "name")) +} + +func TestEnvironmentTemplate_other_vars(t *testing.T) { + otherVar := map[string]string{"TEST": "value"} + res := EnvironmentTemplate(t, ` + resource "user" "me" { + name = "{var.TEST}" + }`, otherVar) + assert.Equal(t, otherVar["TEST"], qa.FirstKeyValue(t, res, "name")) +} + +func TestEnvironmentTemplate_unset_env(t *testing.T) { + res, err := environmentTemplate(t, ` + resource "user" "me" { + name = "{env.USER}" + email = "{env.USER}+{var.RANDOM}@example.com" + }`) + assert.Equal(t, "", res) + assert.Errorf(t, err, fmt.Sprintf("please set %d variables and restart.", 2)) +} diff --git a/jobs/resource_job.go b/jobs/resource_job.go index f8ec02ff44..d0c39c49d0 100644 --- a/jobs/resource_job.go +++ b/jobs/resource_job.go @@ -62,6 +62,7 @@ type EmailNotifications struct { OnSuccess []string `json:"on_success,omitempty"` OnFailure []string `json:"on_failure,omitempty"` NoAlertForSkippedRuns bool `json:"no_alert_for_skipped_runs,omitempty"` + AlertOnLastAttempt bool `json:"alert_on_last_attempt,omitempty"` } // CronSchedule contains the 
information for the quartz cron expression @@ -146,6 +147,7 @@ type JobSettings struct { Schedule *CronSchedule `json:"schedule,omitempty"` MaxConcurrentRuns int32 `json:"max_concurrent_runs,omitempty"` EmailNotifications *EmailNotifications `json:"email_notifications,omitempty" tf:"suppress_diff"` + Tags map[string]string `json:"tags,omitempty"` } func (js *JobSettings) isMultiTask() bool { diff --git a/mws/resource_workspace_test.go b/mws/resource_workspace_test.go index d4cdfa08fd..82708f0952 100644 --- a/mws/resource_workspace_test.go +++ b/mws/resource_workspace_test.go @@ -1151,7 +1151,7 @@ func TestWorkspaceTokenWrongAuthCornerCase(t *testing.T) { wsApi := NewWorkspacesAPI(context.Background(), client) noAuth := "cannot authenticate parent client: authentication is not configured " + - "for provider.. Please check https://registry.terraform.io/providers/" + + "for provider. Please check https://registry.terraform.io/providers/" + "databrickslabs/databricks/latest/docs#authentication for details" assert.EqualError(t, CreateTokenIfNeeded(wsApi, r.Schema, d), noAuth, "create") assert.EqualError(t, EnsureTokenExistsIfNeeded(wsApi, r.Schema, d), noAuth, "ensure") diff --git a/provider/provider.go b/provider/provider.go index 2db329ec91..c0c1764a12 100644 --- a/provider/provider.go +++ b/provider/provider.go @@ -103,6 +103,7 @@ func DatabricksProvider() *schema.Provider { "databricks_secret_scope": secrets.ResourceSecretScope(), "databricks_secret_acl": secrets.ResourceSecretACL(), "databricks_service_principal": scim.ResourceServicePrincipal(), + "databricks_service_principal_role": aws.ResourceServicePrincipalRole(), "databricks_sql_dashboard": sql.ResourceDashboard(), "databricks_sql_endpoint": sql.ResourceSQLEndpoint(), "databricks_sql_global_config": sql.ResourceSQLGlobalConfig(), diff --git a/provider/provider_test.go b/provider/provider_test.go index 64876589b7..842618938d 100644 --- a/provider/provider_test.go +++ b/provider/provider_test.go @@ -107,7 
+107,7 @@ func TestConfig_TokenEnv(t *testing.T) { env: map[string]string{ "DATABRICKS_TOKEN": "x", }, - assertError: "authentication is not configured for provider.. Environment variables used: DATABRICKS_TOKEN", + assertError: "authentication is not configured for provider. Environment variables used: DATABRICKS_TOKEN", }.apply(t) } @@ -139,7 +139,7 @@ func TestConfig_UserPasswordEnv(t *testing.T) { "DATABRICKS_USERNAME": "x", "DATABRICKS_PASSWORD": "x", }, - assertError: "authentication is not configured for provider.." + + assertError: "authentication is not configured for provider." + " Environment variables used: DATABRICKS_USERNAME, DATABRICKS_PASSWORD", assertHost: "https://x", }.apply(t) diff --git a/qa/testing.go b/qa/testing.go index b86ec7ec19..56134dddf6 100644 --- a/qa/testing.go +++ b/qa/testing.go @@ -4,7 +4,6 @@ import ( "bytes" "context" "encoding/json" - "errors" "fmt" "log" "math/rand" @@ -18,7 +17,6 @@ import ( "github.com/Azure/go-autorest/autorest/azure" "github.com/databrickslabs/terraform-provider-databricks/common" - "github.com/databrickslabs/terraform-provider-databricks/internal" "github.com/hashicorp/go-cty/cty" "github.com/hashicorp/hcl" @@ -512,55 +510,6 @@ func fixHCL(v interface{}) interface{} { } } -// For writing a unit test to intercept the errors (t.Fatalf literally ends the test in failure) -func environmentTemplate(t *testing.T, template string, otherVars ...map[string]string) (string, error) { - vars := map[string]string{ - "RANDOM": RandomName("t"), - } - if len(otherVars) > 1 { - return "", errors.New("cannot have more than one custom variable map") - } - if len(otherVars) == 1 { - for k, v := range otherVars[0] { - vars[k] = v - } - } - // pullAll otherVars - missing := 0 - var varType, varName, value string - r := regexp.MustCompile(`{(env|var).([^{}]*)}`) - for _, variableMatch := range r.FindAllStringSubmatch(template, -1) { - value = "" - varType = variableMatch[1] - varName = variableMatch[2] - switch varType { - 
case "env": - value = os.Getenv(varName) - case "var": - value = vars[varName] - } - if value == "" { - t.Logf("Missing %s %s variable.", varType, varName) - missing++ - continue - } - template = strings.ReplaceAll(template, `{`+varType+`.`+varName+`}`, value) - } - if missing > 0 { - return "", fmt.Errorf("please set %d variables and restart", missing) - } - return internal.TrimLeadingWhitespace(template), nil -} - -// EnvironmentTemplate asserts existence and fills in {env.VAR} & {var.RANDOM} placeholders in template -func EnvironmentTemplate(t *testing.T, template string, otherVars ...map[string]string) string { - resp, err := environmentTemplate(t, template, otherVars...) - if err != nil { - t.Skipf(err.Error()) - } - return resp -} - // FirstKeyValue gets it from HCL string func FirstKeyValue(t *testing.T, str, key string) string { r := regexp.MustCompile(key + `\s+=\s+"([^"]*)"`) diff --git a/qa/testing_test.go b/qa/testing_test.go index 2ff9ed491f..efc3630de8 100644 --- a/qa/testing_test.go +++ b/qa/testing_test.go @@ -3,7 +3,6 @@ package qa import ( "context" "fmt" - "os" "testing" "github.com/databrickslabs/terraform-provider-databricks/common" @@ -28,38 +27,6 @@ func TestRandomName(t *testing.T) { assert.Equal(t, 14, len(n)) } -func TestEnvironmentTemplate(t *testing.T) { - defer common.CleanupEnvironment() - err := os.Setenv("USER", RandomName()) - assert.NoError(t, err) - - res := EnvironmentTemplate(t, ` - resource "user" "me" { - name = "{env.USER}" - email = "{env.USER}+{var.RANDOM}@example.com" - }`) - assert.Equal(t, os.Getenv("USER"), FirstKeyValue(t, res, "name")) -} - -func TestEnvironmentTemplate_other_vars(t *testing.T) { - otherVar := map[string]string{"TEST": "value"} - res := EnvironmentTemplate(t, ` - resource "user" "me" { - name = "{var.TEST}" - }`, otherVar) - assert.Equal(t, otherVar["TEST"], FirstKeyValue(t, res, "name")) -} - -func TestEnvironmentTemplate_unset_env(t *testing.T) { - res, err := environmentTemplate(t, ` - resource 
"user" "me" { - name = "{env.USER}" - email = "{env.USER}+{var.RANDOM}@example.com" - }`) - assert.Equal(t, "", res) - assert.Errorf(t, err, fmt.Sprintf("please set %d variables and restart.", 2)) -} - func TestResourceFixture(t *testing.T) { client, server, err := HttpFixtureClient(t, []HTTPFixture{ { diff --git a/scim/acceptance/resource_user_test.go b/scim/acceptance/resource_user_test.go index 87a46df008..55dc78cefa 100644 --- a/scim/acceptance/resource_user_test.go +++ b/scim/acceptance/resource_user_test.go @@ -44,7 +44,7 @@ func TestAccUserResource(t *testing.T) { if _, ok := os.LookupEnv("CLOUD_ENV"); !ok { t.Skip("Acceptance tests skipped unless env 'CLOUD_ENV' is set") } - config := qa.EnvironmentTemplate(t, ` + config := acceptance.EnvironmentTemplate(t, ` resource "databricks_user" "first" { user_name = "eerste+{var.RANDOM}@example.com" display_name = "Eerste {var.RANDOM}" diff --git a/scim/resource_service_principal.go b/scim/resource_service_principal.go index d5ffb43ed6..d97a7b67c9 100644 --- a/scim/resource_service_principal.go +++ b/scim/resource_service_principal.go @@ -31,7 +31,7 @@ func (a ServicePrincipalsAPI) Create(rsp User) (sp User, err error) { return sp, err } -func (a ServicePrincipalsAPI) read(servicePrincipalID string) (sp User, err error) { +func (a ServicePrincipalsAPI) Read(servicePrincipalID string) (sp User, err error) { servicePrincipalPath := fmt.Sprintf("/preview/scim/v2/ServicePrincipals/%v", servicePrincipalID) err = a.client.Scim(a.context, "GET", servicePrincipalPath, nil, &sp) return @@ -51,9 +51,14 @@ func (a ServicePrincipalsAPI) filter(filter string) (u []User, err error) { return } +// Patch updates resource-friendly entity +func (a ServicePrincipalsAPI) Patch(servicePrincipalID string, r patchRequest) error { + return a.client.Scim(a.context, http.MethodPatch, fmt.Sprintf("/preview/scim/v2/ServicePrincipals/%v", servicePrincipalID), r, nil) +} + // Update replaces resource-friendly-entity func (a ServicePrincipalsAPI) 
Update(servicePrincipalID string, updateRequest User) error { - servicePrincipal, err := a.read(servicePrincipalID) + servicePrincipal, err := a.Read(servicePrincipalID) if err != nil { return err } @@ -114,7 +119,7 @@ func ResourceServicePrincipal() *schema.Resource { return nil }, Read: func(ctx context.Context, d *schema.ResourceData, c *common.DatabricksClient) error { - sp, err := NewServicePrincipalsAPI(ctx, c).read(d.Id()) + sp, err := NewServicePrincipalsAPI(ctx, c).Read(d.Id()) if err != nil { return err } diff --git a/scripts/Dockerfile b/scripts/Dockerfile index f10582f63c..b5938bd34e 100644 --- a/scripts/Dockerfile +++ b/scripts/Dockerfile @@ -4,9 +4,8 @@ RUN apk add jq \ && apk add go \ && apk add python3 \ && apk add make \ - && go get gotest.tools/gotestsum \ - && go get honnef.co/go/tools/cmd/staticcheck \ - && go get github.com/katbyte/terrafmt + && go install gotest.tools/gotestsum@latest \ + && go install honnef.co/go/tools/cmd/staticcheck@latest RUN mkdir /src \ && ln -s /root/go/bin/gotestsum /bin/gotestsum \ @@ -17,4 +16,4 @@ COPY . . 
RUN make install -ENTRYPOINT [ "/src/scripts/it.sh" ] \ No newline at end of file +ENTRYPOINT [ "/src/scripts/it.sh" ] diff --git a/secrets/acceptance/secret_acl_test.go b/secrets/acceptance/secret_acl_test.go index 5e64f5dd59..2003856567 100644 --- a/secrets/acceptance/secret_acl_test.go +++ b/secrets/acceptance/secret_acl_test.go @@ -10,7 +10,6 @@ import ( "github.com/databrickslabs/terraform-provider-databricks/common" "github.com/databrickslabs/terraform-provider-databricks/internal/acceptance" - "github.com/databrickslabs/terraform-provider-databricks/qa" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" "github.com/stretchr/testify/assert" @@ -24,7 +23,7 @@ func TestAccSecretAclResource(t *testing.T) { acceptance.AccTest(t, resource.TestCase{ Steps: []resource.TestStep{ { - Config: qa.EnvironmentTemplate(t, ` + Config: acceptance.EnvironmentTemplate(t, ` resource "databricks_group" "ds" { display_name = "data-scientists-{var.RANDOM}" } @@ -69,7 +68,7 @@ func TestAccSecretAclResourceDefaultPrincipal(t *testing.T) { acceptance.AccTest(t, resource.TestCase{ Steps: []resource.TestStep{ { - Config: qa.EnvironmentTemplate(t, ` + Config: acceptance.EnvironmentTemplate(t, ` resource "databricks_secret_scope" "app" { name = "app-{var.RANDOM}" initial_manage_principal = "users" diff --git a/secrets/acceptance/secret_test.go b/secrets/acceptance/secret_test.go index 26c2a812c3..c1205d650a 100644 --- a/secrets/acceptance/secret_test.go +++ b/secrets/acceptance/secret_test.go @@ -19,7 +19,7 @@ func TestAccSecretResource(t *testing.T) { if _, ok := os.LookupEnv("CLOUD_ENV"); !ok { t.Skip("Acceptance tests skipped unless env 'CLOUD_ENV' is set") } - config := qa.EnvironmentTemplate(t, ` + config := acceptance.EnvironmentTemplate(t, ` resource "databricks_secret_scope" "this" { name = "tf-scope-{var.RANDOM}" } diff --git a/storage/acceptance/adls_gen1_test.go b/storage/acceptance/adls_gen1_test.go index 
55d3fe13a3..66e6831c05 100644 --- a/storage/acceptance/adls_gen1_test.go +++ b/storage/acceptance/adls_gen1_test.go @@ -22,7 +22,7 @@ func TestAzureAccAdlsGen1Mount_correctly_mounts(t *testing.T) { acceptance.AccTest(t, resource.TestCase{ Steps: []resource.TestStep{ { - Config: qa.EnvironmentTemplate(t, ` + Config: acceptance.EnvironmentTemplate(t, ` resource "databricks_secret_scope" "terraform" { name = "terraform-{var.RANDOM}" initial_manage_principal = "users" diff --git a/storage/acceptance/adls_gen2_test.go b/storage/acceptance/adls_gen2_test.go index 356579e49f..5a2c7efb9f 100644 --- a/storage/acceptance/adls_gen2_test.go +++ b/storage/acceptance/adls_gen2_test.go @@ -22,7 +22,7 @@ func TestAzureAccAdlsGen2Mount_correctly_mounts(t *testing.T) { acceptance.AccTest(t, resource.TestCase{ Steps: []resource.TestStep{ { - Config: qa.EnvironmentTemplate(t, ` + Config: acceptance.EnvironmentTemplate(t, ` resource "databricks_secret_scope" "terraform" { name = "terraform-{var.RANDOM}" initial_manage_principal = "users" diff --git a/storage/acceptance/aws_s3_mount_test.go b/storage/acceptance/aws_s3_mount_test.go index 1e334f4db2..660993986b 100644 --- a/storage/acceptance/aws_s3_mount_test.go +++ b/storage/acceptance/aws_s3_mount_test.go @@ -43,7 +43,7 @@ func TestAwsAccS3IamMount_WithCluster(t *testing.T) { if instanceProfilesAPI.IsRegistered(arn) { return false } - config := qa.EnvironmentTemplate(t, ` + config := acceptance.EnvironmentTemplate(t, ` resource "databricks_instance_profile" "this" { instance_profile_arn = "{env.TEST_EC2_INSTANCE_PROFILE}" } @@ -89,7 +89,7 @@ func TestAwsAccS3IamMount_NoClusterGiven(t *testing.T) { ctx := context.WithValue(context.Background(), common.Current, t.Name()) instanceProfilesAPI := aws.NewInstanceProfilesAPI(ctx, client) instanceProfilesAPI.Synchronized(arn, func() bool { - config := qa.EnvironmentTemplate(t, ` + config := acceptance.EnvironmentTemplate(t, ` resource "databricks_instance_profile" "this" { 
instance_profile_arn = "{env.TEST_EC2_INSTANCE_PROFILE}" } diff --git a/storage/acceptance/azure_blob_mount_test.go b/storage/acceptance/azure_blob_mount_test.go index a76e531d71..b0017ced2c 100644 --- a/storage/acceptance/azure_blob_mount_test.go +++ b/storage/acceptance/azure_blob_mount_test.go @@ -36,7 +36,7 @@ func TestAzureAccBlobMount_correctly_mounts(t *testing.T) { if _, ok := os.LookupEnv("CLOUD_ENV"); !ok { t.Skip("Acceptance tests skipped unless env 'CLOUD_ENV' is set") } - config := qa.EnvironmentTemplate(t, ` + config := acceptance.EnvironmentTemplate(t, ` resource "databricks_secret_scope" "terraform" { name = "terraform-{var.RANDOM}" initial_manage_principal = "users" diff --git a/storage/adls_gen1_mount_test.go b/storage/adls_gen1_mount_test.go index 75a868e1b3..f321f4904b 100644 --- a/storage/adls_gen1_mount_test.go +++ b/storage/adls_gen1_mount_test.go @@ -5,8 +5,8 @@ import ( "testing" "github.com/databrickslabs/terraform-provider-databricks/clusters" + "github.com/databrickslabs/terraform-provider-databricks/commands" "github.com/databrickslabs/terraform-provider-databricks/common" - "github.com/databrickslabs/terraform-provider-databricks/internal" "github.com/databrickslabs/terraform-provider-databricks/qa" "github.com/stretchr/testify/assert" @@ -27,7 +27,7 @@ func TestResourceAdlsGen1Mount_Create(t *testing.T) { }, Resource: ResourceAzureAdlsGen1Mount(), CommandMock: func(commandStr string) common.CommandResults { - trunc := internal.TrimLeadingWhitespace(commandStr) + trunc := commands.TrimLeadingWhitespace(commandStr) t.Logf("Received command:\n%s", trunc) if strings.HasPrefix(trunc, "def safe_mount") { assert.Contains(t, trunc, "adl://test-adls.azuredatalakestore.net") diff --git a/storage/adls_gen2_mount_test.go b/storage/adls_gen2_mount_test.go index 34eb5dce0c..ea1c907f72 100644 --- a/storage/adls_gen2_mount_test.go +++ b/storage/adls_gen2_mount_test.go @@ -5,8 +5,8 @@ import ( "testing" 
"github.com/databrickslabs/terraform-provider-databricks/clusters" + "github.com/databrickslabs/terraform-provider-databricks/commands" "github.com/databrickslabs/terraform-provider-databricks/common" - "github.com/databrickslabs/terraform-provider-databricks/internal" "github.com/databrickslabs/terraform-provider-databricks/qa" "github.com/stretchr/testify/assert" @@ -27,7 +27,7 @@ func TestResourceAdlsGen2Mount_Create(t *testing.T) { }, Resource: ResourceAzureAdlsGen2Mount(), CommandMock: func(commandStr string) common.CommandResults { - trunc := internal.TrimLeadingWhitespace(commandStr) + trunc := commands.TrimLeadingWhitespace(commandStr) t.Logf("Received command:\n%s", trunc) if strings.HasPrefix(trunc, "def safe_mount") { assert.Contains(t, trunc, "abfss://e@test-adls-gen2.dfs.core.windows.net") diff --git a/storage/aws_s3_mount.go b/storage/aws_s3_mount.go index b6d4bfcf83..a0388dfe93 100644 --- a/storage/aws_s3_mount.go +++ b/storage/aws_s3_mount.go @@ -116,11 +116,7 @@ func preprocessS3Mount(ctx context.Context, d *schema.ResourceData, m interface{ return fmt.Errorf("cluster %s must have EC2 instance profile attached", clusterID) } } else if instanceProfile != "" { - cluster, err := GetOrCreateMountingClusterWithInstanceProfile(clustersAPI, instanceProfile) - if err != nil { - return err - } - return d.Set("cluster_id", cluster.ClusterID) + return mountS3ViaProfileAndSetClusterID(clustersAPI, instanceProfile, d) } return nil } diff --git a/storage/aws_s3_mount_test.go b/storage/aws_s3_mount_test.go index 7dda4a96cd..e6c7339673 100644 --- a/storage/aws_s3_mount_test.go +++ b/storage/aws_s3_mount_test.go @@ -5,8 +5,8 @@ import ( "testing" "github.com/databrickslabs/terraform-provider-databricks/clusters" + "github.com/databrickslabs/terraform-provider-databricks/commands" "github.com/databrickslabs/terraform-provider-databricks/common" - "github.com/databrickslabs/terraform-provider-databricks/internal" 
"github.com/databrickslabs/terraform-provider-databricks/qa" "github.com/stretchr/testify/assert" @@ -36,7 +36,7 @@ func TestResourceAwsS3MountCreate(t *testing.T) { }, Resource: ResourceAWSS3Mount(), CommandMock: func(commandStr string) common.CommandResults { - trunc := internal.TrimLeadingWhitespace(commandStr) + trunc := commands.TrimLeadingWhitespace(commandStr) t.Logf("Received command:\n%s", trunc) if strings.HasPrefix(trunc, "def safe_mount") { assert.Contains(t, trunc, testS3BucketPath) // bucketname @@ -82,7 +82,7 @@ func TestResourceAwsS3MountCreate_invalid_arn(t *testing.T) { }, Create: true, }.Apply(t) - require.EqualError(t, err, "invalid arn: this_mount") + require.EqualError(t, err, "mount via profile: invalid arn: this_mount") } func TestResourceAwsS3MountRead(t *testing.T) { @@ -102,7 +102,7 @@ func TestResourceAwsS3MountRead(t *testing.T) { }, Resource: ResourceAWSS3Mount(), CommandMock: func(commandStr string) common.CommandResults { - trunc := internal.TrimLeadingWhitespace(commandStr) + trunc := commands.TrimLeadingWhitespace(commandStr) t.Logf("Received command:\n%s", trunc) assert.Contains(t, trunc, "dbutils.fs.mounts()") assert.Contains(t, trunc, `mount.mountPoint == "/mnt/this_mount"`) @@ -141,7 +141,7 @@ func TestResourceAwsS3MountRead_NotFound(t *testing.T) { }, Resource: ResourceAWSS3Mount(), CommandMock: func(commandStr string) common.CommandResults { - trunc := internal.TrimLeadingWhitespace(commandStr) + trunc := commands.TrimLeadingWhitespace(commandStr) t.Logf("Received command:\n%s", trunc) return common.CommandResults{ ResultType: "error", @@ -176,7 +176,7 @@ func TestResourceAwsS3MountRead_Error(t *testing.T) { }, Resource: ResourceAWSS3Mount(), CommandMock: func(commandStr string) common.CommandResults { - trunc := internal.TrimLeadingWhitespace(commandStr) + trunc := commands.TrimLeadingWhitespace(commandStr) t.Logf("Received command:\n%s", trunc) return common.CommandResults{ ResultType: "error", @@ -213,7 +213,7 @@ func 
TestResourceAwsS3MountDelete(t *testing.T) { }, Resource: ResourceAWSS3Mount(), CommandMock: func(commandStr string) common.CommandResults { - trunc := internal.TrimLeadingWhitespace(commandStr) + trunc := commands.TrimLeadingWhitespace(commandStr) t.Logf("Received command:\n%s", trunc) assert.Contains(t, trunc, "/mnt/this_mount") assert.Contains(t, trunc, "dbutils.fs.unmount(mount_point)") diff --git a/storage/azure_blob_mount_test.go b/storage/azure_blob_mount_test.go index 9a0a102927..7c7400ae0a 100644 --- a/storage/azure_blob_mount_test.go +++ b/storage/azure_blob_mount_test.go @@ -6,7 +6,7 @@ import ( "github.com/databrickslabs/terraform-provider-databricks/clusters" "github.com/databrickslabs/terraform-provider-databricks/common" - "github.com/databrickslabs/terraform-provider-databricks/internal" + "github.com/databrickslabs/terraform-provider-databricks/commands" "github.com/databrickslabs/terraform-provider-databricks/qa" "github.com/stretchr/testify/assert" @@ -27,7 +27,7 @@ func TestResourceAzureBlobMountCreate(t *testing.T) { }, Resource: ResourceAzureBlobMount(), CommandMock: func(commandStr string) common.CommandResults { - trunc := internal.TrimLeadingWhitespace(commandStr) + trunc := commands.TrimLeadingWhitespace(commandStr) t.Logf("Received command:\n%s", trunc) if strings.HasPrefix(trunc, "def safe_mount") { @@ -105,7 +105,7 @@ func TestResourceAzureBlobMountRead(t *testing.T) { }, Resource: ResourceAzureBlobMount(), CommandMock: func(commandStr string) common.CommandResults { - trunc := internal.TrimLeadingWhitespace(commandStr) + trunc := commands.TrimLeadingWhitespace(commandStr) t.Logf("Received command:\n%s", trunc) assert.Contains(t, trunc, "dbutils.fs.mounts()") assert.Contains(t, trunc, `mount.mountPoint == "/mnt/e"`) @@ -145,7 +145,7 @@ func TestResourceAzureBlobMountRead_NotFound(t *testing.T) { }, Resource: ResourceAzureBlobMount(), CommandMock: func(commandStr string) common.CommandResults { - trunc := 
internal.TrimLeadingWhitespace(commandStr) + trunc := commands.TrimLeadingWhitespace(commandStr) t.Logf("Received command:\n%s", trunc) return common.CommandResults{ ResultType: "error", @@ -181,7 +181,7 @@ func TestResourceAzureBlobMountRead_Error(t *testing.T) { }, Resource: ResourceAzureBlobMount(), CommandMock: func(commandStr string) common.CommandResults { - trunc := internal.TrimLeadingWhitespace(commandStr) + trunc := commands.TrimLeadingWhitespace(commandStr) t.Logf("Received command:\n%s", trunc) return common.CommandResults{ ResultType: "error", @@ -219,7 +219,7 @@ func TestResourceAzureBlobMountDelete(t *testing.T) { }, Resource: ResourceAzureBlobMount(), CommandMock: func(commandStr string) common.CommandResults { - trunc := internal.TrimLeadingWhitespace(commandStr) + trunc := commands.TrimLeadingWhitespace(commandStr) t.Logf("Received command:\n%s", trunc) assert.Contains(t, trunc, "dbutils.fs.unmount(mount_point)") return common.CommandResults{ diff --git a/storage/mounts_test.go b/storage/mounts_test.go index 0c148da15f..acf3c62cda 100644 --- a/storage/mounts_test.go +++ b/storage/mounts_test.go @@ -6,7 +6,7 @@ import ( "testing" "github.com/databrickslabs/terraform-provider-databricks/clusters" - "github.com/databrickslabs/terraform-provider-databricks/internal" + "github.com/databrickslabs/terraform-provider-databricks/commands" "github.com/databrickslabs/terraform-provider-databricks/qa" @@ -47,7 +47,7 @@ func testMountFuncHelper(t *testing.T, mountFunc func(mp MountPoint, mount Mount c.WithCommandMock(func(commandStr string) common.CommandResults { called = true - assert.Equal(t, internal.TrimLeadingWhitespace(expectedCommand), internal.TrimLeadingWhitespace(commandStr)) + assert.Equal(t, commands.TrimLeadingWhitespace(expectedCommand), commands.TrimLeadingWhitespace(commandStr)) return common.CommandResults{ ResultType: "text", Data: expectedCommandResp, diff --git a/storage/resource_mount_test.go b/storage/resource_mount_test.go index 
2de9dd3948..c3460ed325 100644 --- a/storage/resource_mount_test.go +++ b/storage/resource_mount_test.go @@ -6,7 +6,7 @@ import ( "github.com/databrickslabs/terraform-provider-databricks/clusters" "github.com/databrickslabs/terraform-provider-databricks/common" - "github.com/databrickslabs/terraform-provider-databricks/internal" + "github.com/databrickslabs/terraform-provider-databricks/commands" "github.com/databrickslabs/terraform-provider-databricks/qa" "github.com/Azure/go-autorest/autorest/azure" @@ -84,7 +84,7 @@ func TestResourceAwsS3MountGenericCreate(t *testing.T) { }, Resource: ResourceMount(), CommandMock: func(commandStr string) common.CommandResults { - trunc := internal.TrimLeadingWhitespace(commandStr) + trunc := commands.TrimLeadingWhitespace(commandStr) t.Logf("Received command:\n%s", trunc) if strings.HasPrefix(trunc, "def safe_mount") { assert.Contains(t, trunc, testS3BucketPath) // bucketname @@ -127,7 +127,7 @@ func TestResourceAwsS3MountGenericCreate_NoName(t *testing.T) { }, Resource: ResourceMount(), CommandMock: func(commandStr string) common.CommandResults { - trunc := internal.TrimLeadingWhitespace(commandStr) + trunc := commands.TrimLeadingWhitespace(commandStr) t.Logf("Received command:\n%s", trunc) if strings.HasPrefix(trunc, "def safe_mount") { assert.Contains(t, trunc, testS3BucketPath) // bucketname @@ -235,7 +235,7 @@ func TestResourceAwsS3MountGenericCreate_WithInstanceProfile(t *testing.T) { }, Resource: ResourceMount(), CommandMock: func(commandStr string) common.CommandResults { - trunc := internal.TrimLeadingWhitespace(commandStr) + trunc := commands.TrimLeadingWhitespace(commandStr) t.Logf("Received command:\n%s", trunc) if strings.HasPrefix(trunc, "def safe_mount") { assert.Contains(t, trunc, testS3BucketPath) // bucketname @@ -288,7 +288,7 @@ func TestResourceAwsS3MountGenericCreate_invalid_arn(t *testing.T) { }, Create: true, }.Apply(t) - require.EqualError(t, err, "invalid arn: this_mount") + require.EqualError(t, err, 
"mount via profile: invalid arn: this_mount") } func TestResourceAwsS3MountGenericRead(t *testing.T) { @@ -308,7 +308,7 @@ func TestResourceAwsS3MountGenericRead(t *testing.T) { }, Resource: ResourceMount(), CommandMock: func(commandStr string) common.CommandResults { - trunc := internal.TrimLeadingWhitespace(commandStr) + trunc := commands.TrimLeadingWhitespace(commandStr) t.Logf("Received command:\n%s", trunc) assert.Contains(t, trunc, "dbutils.fs.mounts()") assert.Contains(t, trunc, `mount.mountPoint == "/mnt/this_mount"`) @@ -349,7 +349,7 @@ func TestResourceAwsS3MountGenericRead_NotFound(t *testing.T) { }, Resource: ResourceMount(), CommandMock: func(commandStr string) common.CommandResults { - trunc := internal.TrimLeadingWhitespace(commandStr) + trunc := commands.TrimLeadingWhitespace(commandStr) t.Logf("Received command:\n%s", trunc) return common.CommandResults{ ResultType: "error", @@ -386,7 +386,7 @@ func TestResourceAwsS3MountGenericRead_Error(t *testing.T) { }, Resource: ResourceMount(), CommandMock: func(commandStr string) common.CommandResults { - trunc := internal.TrimLeadingWhitespace(commandStr) + trunc := commands.TrimLeadingWhitespace(commandStr) t.Logf("Received command:\n%s", trunc) return common.CommandResults{ ResultType: "error", @@ -425,7 +425,7 @@ func TestResourceAwsS3MountDeleteGeneric(t *testing.T) { }, Resource: ResourceMount(), CommandMock: func(commandStr string) common.CommandResults { - trunc := internal.TrimLeadingWhitespace(commandStr) + trunc := commands.TrimLeadingWhitespace(commandStr) t.Logf("Received command:\n%s", trunc) assert.Contains(t, trunc, "/mnt/this_mount") assert.Contains(t, trunc, "dbutils.fs.unmount(mount_point)") @@ -465,7 +465,7 @@ func TestResourceAdlsGen1MountGeneric_Create(t *testing.T) { }, Resource: ResourceMount(), CommandMock: func(commandStr string) common.CommandResults { - trunc := internal.TrimLeadingWhitespace(commandStr) + trunc := commands.TrimLeadingWhitespace(commandStr) t.Logf("Received 
command:\n%s", trunc) if strings.HasPrefix(trunc, "def safe_mount") { assert.Contains(t, trunc, "adl://test-adls.azuredatalakestore.net") @@ -509,7 +509,7 @@ func TestResourceAdlsGen1MountGeneric_Create_ResourceID(t *testing.T) { }, Resource: ResourceMount(), CommandMock: func(commandStr string) common.CommandResults { - trunc := internal.TrimLeadingWhitespace(commandStr) + trunc := commands.TrimLeadingWhitespace(commandStr) t.Logf("Received command:\n%s", trunc) if strings.HasPrefix(trunc, "def safe_mount") { assert.Contains(t, trunc, "adl://gen1.azuredatalakestore.net") @@ -630,7 +630,7 @@ func TestResourceAdlsGen2MountGeneric_Create(t *testing.T) { }, Resource: ResourceMount(), CommandMock: func(commandStr string) common.CommandResults { - trunc := internal.TrimLeadingWhitespace(commandStr) + trunc := commands.TrimLeadingWhitespace(commandStr) t.Logf("Received command:\n%s", trunc) if strings.HasPrefix(trunc, "def safe_mount") { assert.Contains(t, trunc, "abfss://e@test-adls-gen2.dfs.core.windows.net") @@ -675,7 +675,7 @@ func TestResourceAdlsGen2MountGeneric_Create_ResourceID(t *testing.T) { }, Resource: ResourceMount(), CommandMock: func(commandStr string) common.CommandResults { - trunc := internal.TrimLeadingWhitespace(commandStr) + trunc := commands.TrimLeadingWhitespace(commandStr) t.Logf("Received command:\n%s", trunc) if strings.HasPrefix(trunc, "def safe_mount") { assert.Contains(t, trunc, "abfss://e@test-adls-gen2.dfs.core.windows.net") @@ -718,7 +718,7 @@ func TestResourceAdlsGen2MountGeneric_Create_NoTenantID_SPN(t *testing.T) { }, Resource: ResourceMount(), CommandMock: func(commandStr string) common.CommandResults { - trunc := internal.TrimLeadingWhitespace(commandStr) + trunc := commands.TrimLeadingWhitespace(commandStr) t.Logf("Received command:\n%s", trunc) if strings.HasPrefix(trunc, "def safe_mount") { assert.Contains(t, trunc, "abfss://e@test-adls-gen2.dfs.core.windows.net") @@ -764,7 +764,7 @@ func 
TestResourceAdlsGen2MountGeneric_Create_NoTenantID_CLI(t *testing.T) { }, Resource: ResourceMount(), CommandMock: func(commandStr string) common.CommandResults { - trunc := internal.TrimLeadingWhitespace(commandStr) + trunc := commands.TrimLeadingWhitespace(commandStr) t.Logf("Received command:\n%s", trunc) if strings.HasPrefix(trunc, "def safe_mount") { assert.Contains(t, trunc, "abfss://e@test-adls-gen2.dfs.core.windows.net") @@ -853,7 +853,7 @@ func TestResourceAzureBlobMountCreateGeneric(t *testing.T) { }, Resource: ResourceMount(), CommandMock: func(commandStr string) common.CommandResults { - trunc := internal.TrimLeadingWhitespace(commandStr) + trunc := commands.TrimLeadingWhitespace(commandStr) t.Logf("Received command:\n%s", trunc) if strings.HasPrefix(trunc, "def safe_mount") { @@ -899,7 +899,7 @@ func TestResourceAzureBlobMountCreateGeneric_SAS(t *testing.T) { }, Resource: ResourceMount(), CommandMock: func(commandStr string) common.CommandResults { - trunc := internal.TrimLeadingWhitespace(commandStr) + trunc := commands.TrimLeadingWhitespace(commandStr) t.Logf("Received command:\n%s", trunc) if strings.HasPrefix(trunc, "def safe_mount") { @@ -945,7 +945,7 @@ func TestResourceAzureBlobMountCreateGeneric_Resource_ID(t *testing.T) { }, Resource: ResourceMount(), CommandMock: func(commandStr string) common.CommandResults { - trunc := internal.TrimLeadingWhitespace(commandStr) + trunc := commands.TrimLeadingWhitespace(commandStr) t.Logf("Received command:\n%s", trunc) if strings.HasPrefix(trunc, "def safe_mount") { @@ -1075,7 +1075,7 @@ func TestResourceAzureBlobMountGeneric_Read(t *testing.T) { }, Resource: ResourceMount(), CommandMock: func(commandStr string) common.CommandResults { - trunc := internal.TrimLeadingWhitespace(commandStr) + trunc := commands.TrimLeadingWhitespace(commandStr) t.Logf("Received command:\n%s", trunc) assert.Contains(t, trunc, "dbutils.fs.mounts()") assert.Contains(t, trunc, `mount.mountPoint == "/mnt/e"`) @@ -1117,7 +1117,7 @@ 
func TestResourceAzureBlobMountGenericRead_NotFound(t *testing.T) { }, Resource: ResourceMount(), CommandMock: func(commandStr string) common.CommandResults { - trunc := internal.TrimLeadingWhitespace(commandStr) + trunc := commands.TrimLeadingWhitespace(commandStr) t.Logf("Received command:\n%s", trunc) return common.CommandResults{ ResultType: "error", @@ -1155,7 +1155,7 @@ func TestResourceAzureBlobMountGenericRead_Error(t *testing.T) { }, Resource: ResourceMount(), CommandMock: func(commandStr string) common.CommandResults { - trunc := internal.TrimLeadingWhitespace(commandStr) + trunc := commands.TrimLeadingWhitespace(commandStr) t.Logf("Received command:\n%s", trunc) return common.CommandResults{ ResultType: "error", @@ -1195,7 +1195,7 @@ func TestResourceAzureBlobMountGenericDelete(t *testing.T) { }, Resource: ResourceMount(), CommandMock: func(commandStr string) common.CommandResults { - trunc := internal.TrimLeadingWhitespace(commandStr) + trunc := commands.TrimLeadingWhitespace(commandStr) t.Logf("Received command:\n%s", trunc) assert.Contains(t, trunc, "dbutils.fs.unmount(mount_point)") return common.CommandResults{ @@ -1265,7 +1265,7 @@ func TestResourceGcsMountGenericCreate_WithCluster(t *testing.T) { }, Resource: ResourceMount(), CommandMock: func(commandStr string) common.CommandResults { - trunc := internal.TrimLeadingWhitespace(commandStr) + trunc := commands.TrimLeadingWhitespace(commandStr) t.Logf("Received command:\n%s", trunc) if strings.HasPrefix(trunc, "def safe_mount") { assert.Contains(t, trunc, testGcsBucketPath) // bucketname @@ -1309,7 +1309,7 @@ func TestResourceGcsMountGenericCreate_WithCluster_NoName(t *testing.T) { }, Resource: ResourceMount(), CommandMock: func(commandStr string) common.CommandResults { - trunc := internal.TrimLeadingWhitespace(commandStr) + trunc := commands.TrimLeadingWhitespace(commandStr) t.Logf("Received command:\n%s", trunc) if strings.HasPrefix(trunc, "def safe_mount") { assert.Contains(t, trunc, 
testGcsBucketPath) // bucketname @@ -1414,7 +1414,7 @@ func TestResourceGcsMountGenericCreate_WithServiceAccount(t *testing.T) { }, Resource: ResourceMount(), CommandMock: func(commandStr string) common.CommandResults { - trunc := internal.TrimLeadingWhitespace(commandStr) + trunc := commands.TrimLeadingWhitespace(commandStr) t.Logf("Received command:\n%s", trunc) if strings.HasPrefix(trunc, "def safe_mount") { assert.Contains(t, trunc, testGcsBucketPath) // bucketname @@ -1472,7 +1472,7 @@ func TestResourceMountGenericCreate_WithUriAndOpts(t *testing.T) { }, Resource: ResourceMount(), CommandMock: func(commandStr string) common.CommandResults { - trunc := internal.TrimLeadingWhitespace(commandStr) + trunc := commands.TrimLeadingWhitespace(commandStr) t.Logf("Received command:\n%s", trunc) if strings.HasPrefix(trunc, "def safe_mount") { assert.Contains(t, trunc, abfssPath) // URI diff --git a/storage/s3.go b/storage/s3.go index 52f72b119b..cde206b9a6 100644 --- a/storage/s3.go +++ b/storage/s3.go @@ -62,6 +62,12 @@ func preprocessS3MountGeneric(ctx context.Context, s map[string]*schema.Schema, clustersAPI := clusters.NewClustersAPI(ctx, m) if clusterID != "" { clusterInfo, err := clustersAPI.Get(clusterID) + if common.IsMissing(err) { + if instanceProfile == "" { + return fmt.Errorf("instance profile is required to re-create mounting cluster") + } + return mountS3ViaProfileAndSetClusterID(clustersAPI, instanceProfile, d) + } if err != nil { return err } @@ -69,11 +75,16 @@ func preprocessS3MountGeneric(ctx context.Context, s map[string]*schema.Schema, return fmt.Errorf("cluster %s must have EC2 instance profile attached", clusterID) } } else if instanceProfile != "" { - cluster, err := GetOrCreateMountingClusterWithInstanceProfile(clustersAPI, instanceProfile) - if err != nil { - return err - } - return d.Set("cluster_id", cluster.ClusterID) + return mountS3ViaProfileAndSetClusterID(clustersAPI, instanceProfile, d) } return nil } + +func 
mountS3ViaProfileAndSetClusterID(clustersAPI clusters.ClustersAPI, + instanceProfile string, d *schema.ResourceData) error { + cluster, err := GetOrCreateMountingClusterWithInstanceProfile(clustersAPI, instanceProfile) + if err != nil { + return fmt.Errorf("mount via profile: %w", err) + } + return d.Set("cluster_id", cluster.ClusterID) +} diff --git a/storage/s3_test.go b/storage/s3_test.go new file mode 100644 index 0000000000..e3f2293cd4 --- /dev/null +++ b/storage/s3_test.go @@ -0,0 +1,106 @@ +package storage + +import ( + "context" + "testing" + + "github.com/databrickslabs/terraform-provider-databricks/clusters" + "github.com/databrickslabs/terraform-provider-databricks/common" + "github.com/databrickslabs/terraform-provider-databricks/qa" + "github.com/stretchr/testify/assert" +) + +func TestPreprocessS3MountOnDeletedClusterNoInstanceProfileSpecifiedError(t *testing.T) { + qa.HTTPFixturesApply(t, []qa.HTTPFixture{ + { + Method: "GET", + Resource: "/api/2.0/clusters/get?cluster_id=removed-cluster", + Status: 404, + Response: common.NotFound("cluster deleted"), + }, + }, func(ctx context.Context, client *common.DatabricksClient) { + r := ResourceMount() + d := r.TestResourceData() + d.Set("uri", "s3://bucket") + d.Set("cluster_id", "removed-cluster") + err := preprocessS3MountGeneric(ctx, r.Schema, d, client) + assert.EqualError(t, err, "instance profile is required to re-create mounting cluster") + }) +} + +func TestPreprocessS3MountOnDeletedClusterWorks(t *testing.T) { + qa.HTTPFixturesApply(t, []qa.HTTPFixture{ + { + Method: "GET", + Resource: "/api/2.0/clusters/get?cluster_id=removed-cluster", + Status: 404, + Response: common.NotFound("cluster deleted"), + }, + { + Method: "GET", + Resource: "/api/2.0/clusters/list", + Response: clusters.ClusterList{ + Clusters: []clusters.ClusterInfo{}, + }, + }, + { + ReuseRequest: true, + Method: "GET", + Resource: "/api/2.0/clusters/spark-versions", + }, + { + ReuseRequest: true, + Method: "GET", + Resource: 
"/api/2.0/clusters/list-node-types", + }, + { + Method: "POST", + Resource: "/api/2.0/clusters/create", + ExpectedRequest: clusters.Cluster{ + CustomTags: map[string]string{ + "ResourceClass": "SingleNode", + }, + ClusterName: "terraform-mount-s3-access", + SparkVersion: "7.3.x-scala2.12", + NumWorkers: 0, + NodeTypeID: "i3.xlarge", + AwsAttributes: &clusters.AwsAttributes{ + Availability: "SPOT", + InstanceProfileArn: "arn:aws:iam::1234567:instance-profile/s3-access", + }, + AutoterminationMinutes: 10, + SparkConf: map[string]string{ + "spark.databricks.cluster.profile": "singleNode", + "spark.master": "local[*]", + "spark.scheduler.mode": "FIFO", + }, + }, + Response: clusters.ClusterID{ + ClusterID: "new-cluster", + }, + }, + { + Method: "GET", + Resource: "/api/2.0/clusters/get?cluster_id=new-cluster", + Response: clusters.ClusterInfo{ + ClusterID: "new-cluster", + State: "RUNNING", + StateMessage: "created", + }, + }, + }, func(ctx context.Context, client *common.DatabricksClient) { + r := ResourceMount() + d := r.TestResourceData() + d.MarkNewResource() + common.StructToData(GenericMount{ + URI: "s3://bucket", + ClusterID: "removed-cluster", + S3: &S3IamMount{ + InstanceProfile: "arn:aws:iam::1234567:instance-profile/s3-access", + }, + }, r.Schema, d) + err := preprocessS3MountGeneric(ctx, r.Schema, d, client) + assert.NoError(t, err) + assert.Equal(t, "new-cluster", d.Get("cluster_id")) + }) +}