Add data sources for retrieving UC tables
* Added `databricks_catalogs` data source
* Added `databricks_schemas` data source
* Added `databricks_tables` data source

Fixes #1105
nfx committed Mar 4, 2022
1 parent be30dc6 commit 8fb949c
Showing 10 changed files with 123 additions and 4 deletions.
6 changes: 6 additions & 0 deletions .metals/metals.lock.db
@@ -0,0 +1,6 @@
#FileLock
#Fri Mar 04 20:10:36 CET 2022
hostName=localhost
id=17f563717e2108ab2ed927d0890b5d32b035b00a997
method=file
server=localhost\:58947
25 changes: 25 additions & 0 deletions catalog/data_catalogs.go
@@ -0,0 +1,25 @@
package catalog

import (
    "context"

    "github.com/databrickslabs/terraform-provider-databricks/common"
    "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
)

func DataSourceCatalogs() *schema.Resource {
    var data struct {
        Ids []string `json:"ids,omitempty" tf:"computed,slice_set"`
    }
    return common.DataResource(&data, func(ctx context.Context, c *common.DatabricksClient) error {
        catalogsAPI := NewCatalogsAPI(ctx, c)
        catalogs, err := catalogsAPI.list()
        if err != nil {
            return err
        }
        for _, v := range catalogs.Catalogs {
            data.Ids = append(data.Ids, v.Name)
        }
        return nil
    })
}
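
For illustration only (not part of this diff): a minimal, self-contained sketch of the flattening the callback above performs, assuming a list response shaped the way the json tags on Catalogs and CatalogInfo imply. The sample payload and catalog names are made up.

package main

import (
    "encoding/json"
    "fmt"
)

// Local copies of the response types, mirroring the json tags in catalog/resource_catalog.go.
type catalogInfo struct {
    Name string `json:"name"`
}

type catalogsResponse struct {
    Catalogs []catalogInfo `json:"catalogs"`
}

func main() {
    // Hypothetical /unity-catalog/catalogs response body.
    payload := `{"catalogs": [{"name": "main"}, {"name": "dev"}]}`
    var resp catalogsResponse
    if err := json.Unmarshal([]byte(payload), &resp); err != nil {
        panic(err)
    }
    ids := []string{}
    for _, v := range resp.Catalogs {
        ids = append(ids, v.Name)
    }
    fmt.Println(ids) // the data source exposes these names as its computed `ids` set
}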
26 changes: 26 additions & 0 deletions catalog/data_schemas.go
@@ -0,0 +1,26 @@
package catalog

import (
    "context"

    "github.com/databrickslabs/terraform-provider-databricks/common"
    "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
)

func DataSourceSchemas() *schema.Resource {
    var data struct {
        CatalogName string `json:"catalog_name"`
        Ids []string `json:"ids,omitempty" tf:"computed,slice_set"`
    }
    return common.DataResource(&data, func(ctx context.Context, c *common.DatabricksClient) error {
        schemasAPI := NewSchemasAPI(ctx, c)
        schemas, err := schemasAPI.listByCatalog(data.CatalogName)
        if err != nil {
            return err
        }
        for _, v := range schemas.Schemas {
            data.Ids = append(data.Ids, v.FullName)
        }
        return nil
    })
}
27 changes: 27 additions & 0 deletions catalog/data_tables.go
@@ -0,0 +1,27 @@
package catalog

import (
    "context"

    "github.com/databrickslabs/terraform-provider-databricks/common"
    "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
)

func DataSourceTables() *schema.Resource {
    var data struct {
        CatalogName string `json:"catalog_name"`
        SchemaName string `json:"schema_name"`
        Ids []string `json:"ids,omitempty" tf:"computed,slice_set"`
    }
    return common.DataResource(&data, func(ctx context.Context, c *common.DatabricksClient) error {
        tablesAPI := NewTablesAPI(ctx, c)
        tables, err := tablesAPI.listTables(data.CatalogName, data.SchemaName)
        if err != nil {
            return err
        }
        for _, v := range tables.Tables {
            data.Ids = append(data.Ids, v.FullName())
        }
        return nil
    })
}
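
For illustration only (not part of this diff): the ids returned by `databricks_tables` are three-level names built by TableInfo.FullName, shown further down in this diff. A standalone sketch with made-up catalog, schema and table names:

package main

import "fmt"

// Mirrors TableInfo.FullName from catalog/resource_table.go.
type tableInfo struct {
    CatalogName string
    SchemaName  string
    Name        string
}

func (ti tableInfo) fullName() string {
    return fmt.Sprintf("%s.%s.%s", ti.CatalogName, ti.SchemaName, ti.Name)
}

func main() {
    // Hypothetical table; real values come from the /unity-catalog/tables listing.
    t := tableInfo{CatalogName: "main", SchemaName: "sales", Name: "orders"}
    fmt.Println(t.fullName()) // main.sales.orders
}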
9 changes: 9 additions & 0 deletions catalog/resource_catalog.go
@@ -25,6 +25,15 @@ type CatalogInfo struct {
    MetastoreID string `json:"metastore_id,omitempty" tf:"computed"`
}

type Catalogs struct {
    Catalogs []CatalogInfo `json:"catalogs"`
}

func (a CatalogsAPI) list() (catalogs Catalogs, err error) {
    err = a.client.Get(a.context, "/unity-catalog/catalogs", nil, &catalogs)
    return
}

func (a CatalogsAPI) createCatalog(ci *CatalogInfo) error {
    return a.client.Post(a.context, "/unity-catalog/catalogs", ci, ci)
}
11 changes: 11 additions & 0 deletions catalog/resource_schema.go
@@ -26,6 +26,17 @@ type SchemaInfo struct {
    FullName string `json:"full_name,omitempty" tf:"computed"`
}

type Schemas struct {
    Schemas []SchemaInfo `json:"schemas"`
}

func (a SchemasAPI) listByCatalog(catalogName string) (schemas Schemas, err error) {
    err = a.client.Get(a.context, "/unity-catalog/schemas", map[string]string{
        "catalog_name": catalogName,
    }, &schemas)
    return
}

func (a SchemasAPI) createSchema(si *SchemaInfo) error {
    return a.client.Post(a.context, "/unity-catalog/schemas", si, si)
}
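
For illustration only (not part of this diff): listByCatalog passes a map of query parameters to the client's Get helper. Assuming the client encodes that map into the request URL's query string, as is the convention for other list endpoints, the resulting request looks roughly like the sketch below; the catalog name is made up and the client prepends the workspace host and API prefix.

package main

import (
    "fmt"
    "net/url"
)

func main() {
    // Query parameters listByCatalog would send for catalog "main".
    params := url.Values{}
    params.Set("catalog_name", "main")
    // The real request is issued by DatabricksClient.Get against the configured workspace.
    fmt.Println("GET /unity-catalog/schemas?" + params.Encode())
}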
14 changes: 13 additions & 1 deletion catalog/resource_table.go
@@ -38,7 +38,7 @@ type TableInfo struct {
    TableType string `json:"table_type"`
    DataSourceFormat string `json:"data_source_format"`
    ColumnInfos []ColumnInfo `json:"columns" tf:"alias:column"`
    StorageLocation string `json:"storage_location"`
    StorageLocation string `json:"storage_location,omitempty"`
    StorageCredentialName string `json:"storage_credential_name,omitempty"`
    ViewDefinition string `json:"view_definition,omitempty"`
    Owner string `json:"owner,omitempty" tf:"computed"`
@@ -50,6 +50,18 @@ func (ti TableInfo) FullName() string {
    return fmt.Sprintf("%s.%s.%s", ti.CatalogName, ti.SchemaName, ti.Name)
}

type Tables struct {
    Tables []TableInfo `json:"tables"`
}

func (a TablesAPI) listTables(catalogName, schemaName string) (tables Tables, err error) {
    err = a.client.Get(a.context, "/unity-catalog/tables/", map[string]string{
        "catalog_name": catalogName,
        "schema_name": schemaName,
    }, &tables)
    return
}

func (a TablesAPI) createTable(ti *TableInfo) error {
    return a.client.Post(a.context, "/unity-catalog/tables", ti, ti)
}
4 changes: 2 additions & 2 deletions common/reflect_resource_test.go
@@ -584,7 +584,7 @@ func TestDataToReflectValueBypass(t *testing.T) {
func TestDataResource(t *testing.T) {
    r := func() *schema.Resource {
        var dto struct {
            In string `json:"in"`
            In string `json:"in"`
            Out string `json:"out,omitempty" tf:"computed"`
        }
        return DataResource(&dto, func(ctx context.Context, c *DatabricksClient) error {
@@ -608,4 +608,4 @@ func TestDataResource(t *testing.T) {
    d.Set("in", "fail")
    diags = r.ReadContext(context.Background(), d, &DatabricksClient{})
    assert.Len(t, diags, 1)
}
}
2 changes: 1 addition & 1 deletion common/version.go
@@ -3,7 +3,7 @@ package common
import "context"

var (
    version = "0.5.1"
    version = "0.5.2"
    // ResourceName is resource name without databricks_ prefix
    ResourceName contextKey = 1
    // Provider is the current instance of provider
3 changes: 3 additions & 0 deletions provider/provider.go
@@ -40,6 +40,7 @@ func DatabricksProvider() *schema.Provider {
        "databricks_aws_assume_role_policy": aws.DataAwsAssumeRolePolicy(),
        "databricks_aws_bucket_policy": aws.DataAwsBucketPolicy(),
        "databricks_clusters": clusters.DataSourceClusters(),
        "databricks_catalogs": catalog.DataSourceCatalogs(),
        "databricks_current_user": scim.DataSourceCurrentUser(),
        "databricks_dbfs_file": storage.DataSourceDBFSFile(),
        "databricks_dbfs_file_paths": storage.DataSourceDBFSFilePaths(),
@@ -48,7 +49,9 @@
        "databricks_node_type": clusters.DataSourceNodeType(),
        "databricks_notebook": workspace.DataSourceNotebook(),
        "databricks_notebook_paths": workspace.DataSourceNotebookPaths(),
        "databricks_schemas": catalog.DataSourceSchemas(),
        "databricks_spark_version": clusters.DataSourceSparkVersion(),
        "databricks_tables": catalog.DataSourceTables(),
        "databricks_user": scim.DataSourceUser(),
        "databricks_zones": clusters.DataSourceClusterZones(),
    },
