Skip to content

Commit

Permalink
Further improvements of exporter (#1602)
Browse files Browse the repository at this point in the history
This is a separate PR for things found during improving SQL exporting:

* Make sure that names are unique for `databricks_group_member` and `databricks_group`
  resources - handling same names with different casing, and missing `DisplayName` for
  users
* Change selective export for DLT pipelines to filter client-side — the built-in
  server-side search was not case-insensitive
* Add export of SQL warehouses & handle references correctly for SQL & DBT tasks in
  `databricks_job`
  • Loading branch information
alexott authored Sep 14, 2022
1 parent 44941f0 commit af30395
Show file tree
Hide file tree
Showing 3 changed files with 41 additions and 12 deletions.
11 changes: 10 additions & 1 deletion exporter/exporter_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -925,6 +925,11 @@ func TestImportingJobs_JobListMultiTask(t *testing.T) {
Dashboard: &jobs.SqlDashboardTask{
DashboardID: "123",
},
WarehouseID: "123",
},
DbtTask: &jobs.DbtTask{
WarehouseId: "123",
Commands: []string{"dbt init"},
},
},
{
Expand Down Expand Up @@ -1506,12 +1511,16 @@ func TestImportingDLTPipelinesMatchingOnly(t *testing.T) {
emptyIpAccessLIst,
{
Method: "GET",
Resource: "/api/2.0/pipelines?filter=name%20LIKE%20%27%25test%25%27&max_results=50",
Resource: "/api/2.0/pipelines?max_results=50",

Response: pipelines.PipelineListResponse{
Statuses: []pipelines.PipelineStateInfo{
{
PipelineID: "123",
Name: "Pipeline1 test",
},
{
PipelineID: "124",
Name: "Pipeline1",
},
},
Expand Down
40 changes: 30 additions & 10 deletions exporter/importables.go
Original file line number Diff line number Diff line change
Expand Up @@ -313,7 +313,6 @@ var resourcesMap map[string]importable = map[string]importable{
{Path: "new_cluster.instance_pool_id", Resource: "databricks_instance_pool"},
{Path: "new_cluster.driver_instance_pool_id", Resource: "databricks_instance_pool"},
{Path: "existing_cluster_id", Resource: "databricks_cluster"},
{Path: "task.existing_cluster_id", Resource: "databricks_cluster"},
{Path: "library.jar", Resource: "databricks_dbfs_file", Match: "dbfs_path"},
{Path: "library.whl", Resource: "databricks_dbfs_file", Match: "dbfs_path"},
{Path: "library.egg", Resource: "databricks_dbfs_file", Match: "dbfs_path"},
Expand All @@ -330,6 +329,10 @@ var resourcesMap map[string]importable = map[string]importable{
{Path: "task.spark_jar_task.jar_uri", Resource: "databricks_dbfs_file", Match: "dbfs_path"},
{Path: "task.notebook_task.notebook_path", Resource: "databricks_notebook"},
{Path: "task.pipeline_task.pipeline_id", Resource: "databricks_pipeline"},
{Path: "task.sql_task.query.query_id", Resource: "databricks_sql_query"},
{Path: "task.sql_task.dashboard.dashboard_id", Resource: "databricks_sql_dashboard"},
{Path: "task.sql_task.warehouse_id", Resource: "databricks_sql_endpoint"},
{Path: "task.dbt_task.warehouse_id", Resource: "databricks_sql_endpoint"},
{Path: "task.new_cluster.aws_attributes.instance_profile_arn", Resource: "databricks_instance_profile"},
{Path: "task.new_cluster.init_scripts.dbfs.destination", Resource: "databricks_dbfs_file"},
{Path: "task.new_cluster.instance_pool_id", Resource: "databricks_instance_pool"},
Expand Down Expand Up @@ -431,6 +434,20 @@ var resourcesMap map[string]importable = map[string]importable{
ID: task.SqlTask.Dashboard.DashboardID,
})
}
if task.SqlTask.WarehouseID != "" {
ic.Emit(&resource{
Resource: "databricks_sql_endpoint",
ID: task.SqlTask.WarehouseID,
})
}
}
if task.DbtTask != nil {
	// Emit the SQL warehouse referenced by the dbt task so that
	// `task.dbt_task.warehouse_id` resolves to a databricks_sql_endpoint.
	// NOTE: must read DbtTask.WarehouseId here — reading SqlTask inside a
	// DbtTask-only guard would panic with a nil dereference when the task
	// has a dbt_task but no sql_task, and would emit the wrong warehouse.
	if task.DbtTask.WarehouseId != "" {
		ic.Emit(&resource{
			Resource: "databricks_sql_endpoint",
			ID:       task.DbtTask.WarehouseId,
		})
	}
}
ic.importCluster(task.NewCluster)
ic.Emit(&resource{
Expand Down Expand Up @@ -499,7 +516,7 @@ var resourcesMap map[string]importable = map[string]importable{
"databricks_group": {
Service: "groups",
Name: func(d *schema.ResourceData) string {
	// Suffix the display name with the resource ID to keep generated
	// resource names unique — groups may share a display name that
	// differs only in casing, which would otherwise collide.
	return d.Get("display_name").(string) + "_" + d.Id()
},
List: func(ic *importContext) error {
if err := ic.cacheGroups(); err != nil {
Expand Down Expand Up @@ -575,7 +592,7 @@ var resourcesMap map[string]importable = map[string]importable{
ic.Emit(&resource{
Resource: "databricks_group_member",
ID: fmt.Sprintf("%s|%s", parent.Value, g.ID),
Name: fmt.Sprintf("%s_%s", parent.Display, g.DisplayName),
Name: fmt.Sprintf("%s_%s_%s", parent.Display, parent.Value, g.DisplayName),
})
}
}
Expand All @@ -594,7 +611,7 @@ var resourcesMap map[string]importable = map[string]importable{
ic.Emit(&resource{
Resource: "databricks_group_member",
ID: fmt.Sprintf("%s|%s", g.ID, x.Value),
Name: fmt.Sprintf("%s_%s", g.DisplayName, x.Display),
Name: fmt.Sprintf("%s_%s_%s", g.DisplayName, x.Value, x.Display),
})
}
if len(g.Members) > 10 {
Expand Down Expand Up @@ -654,10 +671,14 @@ var resourcesMap map[string]importable = map[string]importable{
Resource: "databricks_group",
ID: g.Value,
})
userName := u.DisplayName
if userName == "" {
userName = u.UserName
}
ic.Emit(&resource{
Resource: "databricks_group_member",
ID: fmt.Sprintf("%s|%s", g.Value, u.ID),
Name: fmt.Sprintf("%s_%s", g.Display, u.DisplayName),
Name: fmt.Sprintf("%s_%s_%s", g.Display, g.Value, userName),
})
}
return nil
Expand Down Expand Up @@ -1249,15 +1270,14 @@ var resourcesMap map[string]importable = map[string]importable{
return name + "_" + d.Id()
},
List: func(ic *importContext) error {
filter := ""
if ic.match != "" {
filter = "name LIKE '%" + strings.ReplaceAll(ic.match, "'", "") + "%'"
}
pipelinesList, err := pipelines.NewPipelinesAPI(ic.Context, ic.Client).List(50, filter)
pipelinesList, err := pipelines.NewPipelinesAPI(ic.Context, ic.Client).List(50, "")
if err != nil {
return err
}
for i, q := range pipelinesList {
if !ic.MatchesName(q.Name) {
continue
}
ic.Emit(&resource{
Resource: "databricks_pipeline",
ID: q.PipelineID,
Expand Down
2 changes: 1 addition & 1 deletion exporter/importables_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -129,7 +129,7 @@ func TestGroup(t *testing.T) {
assert.True(t, ic.testEmits["databricks_group_instance_profile[<unknown>] (id: 123|abc)"])
assert.True(t, ic.testEmits["databricks_instance_profile[<unknown>] (id: abc)"])
assert.True(t, ic.testEmits["databricks_group[<unknown>] (id: parent-group)"])
assert.True(t, ic.testEmits["databricks_group_member[_foo] (id: parent-group|123)"])
assert.True(t, ic.testEmits["databricks_group_member[_parent-group_foo] (id: parent-group|123)"])
}

func TestPermissions(t *testing.T) {
Expand Down

0 comments on commit af30395

Please sign in to comment.