Merge branch 'main' into duplicate-all-legacy-structures
mgyucht committed Dec 18, 2024
2 parents a8e00aa + 164a77e commit abe0620
Showing 16 changed files with 1,591 additions and 202 deletions.
2 changes: 1 addition & 1 deletion .codegen/_openapi_sha
@@ -1 +1 @@
7016dcbf2e011459416cf408ce21143bcc4b3a25
a6a317df8327c9b1e5cb59a03a42ffa2aabeef6d
32 changes: 32 additions & 0 deletions CHANGELOG.md
@@ -1,5 +1,37 @@
# Version changelog

## [Release] Release v1.62.0

### New Features and Improvements

* Allow using a GCP service account in `databricks_credential` (storage only) ([#4302](https://github.com/databricks/terraform-provider-databricks/pull/4302)).
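For orientation, a minimal sketch of what this feature enables is shown below. It is illustrative only and not part of this commit: the `databricks_gcp_service_account` block name and the `purpose = "STORAGE"` value are assumptions made by analogy with `databricks_storage_credential`, so check the `databricks_credential` resource documentation for the exact schema.

```hcl
# Illustrative sketch only (not part of this commit).
# Assumes the GCP service account is configured via a
# `databricks_gcp_service_account` block, mirroring
# databricks_storage_credential; verify against the
# databricks_credential resource documentation.
resource "databricks_credential" "gcp_storage" {
  name    = "gcp-sa-storage-cred"
  purpose = "STORAGE" # GCP SA support is storage-only per #4302

  # Databricks creates and manages the GCP service account.
  databricks_gcp_service_account {}

  comment = "Managed by Terraform"
}
```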


### Bug Fixes

* Bump Golang x/crypto to 0.31 ([#4319](https://github.com/databricks/terraform-provider-databricks/pull/4319)).


### Internal Changes

* Generate models with correctly computed annotations ([#4316](https://github.com/databricks/terraform-provider-databricks/pull/4316)).
* Update Go SDK to 0.54.0 ([#4328](https://github.com/databricks/terraform-provider-databricks/pull/4328)).


### Dependency Updates

* Bump github.com/hashicorp/terraform-plugin-framework-validators from 0.15.0 to 0.16.0 ([#4320](https://github.com/databricks/terraform-provider-databricks/pull/4320)).


### Exporter

* Correctly handle DB-managed UC objects ([#4323](https://github.com/databricks/terraform-provider-databricks/pull/4323)).
* Emit WSFS/UC files from `parameters` in jobs ([#4318](https://github.com/databricks/terraform-provider-databricks/pull/4318)).
* Improve support for `databricks_model_serving` ([#4324](https://github.com/databricks/terraform-provider-databricks/pull/4324)).
* Add support for `databricks_credential` ([#4292](https://github.com/databricks/terraform-provider-databricks/pull/4292)).
* Export `databricks_repo` for Git Folders outside of `/Repos` ([#4308](https://github.com/databricks/terraform-provider-databricks/pull/4308)).


## [Release] Release v1.61.0

### New Features and Improvements
4 changes: 2 additions & 2 deletions catalog/resource_credential.go
@@ -88,7 +88,7 @@ func ResourceCredential() common.Resource {
}

// Bind the current workspace if the credential is isolated, otherwise the read will fail
return bindings.AddCurrentWorkspaceBindings(ctx, d, w, cred.Name, catalog.UpdateBindingsSecurableTypeServiceCredential)
return bindings.AddCurrentWorkspaceBindings(ctx, d, w, cred.Name, catalog.UpdateBindingsSecurableTypeCredential)
},
Read: func(ctx context.Context, d *schema.ResourceData, c *common.DatabricksClient) error {
w, err := c.WorkspaceClient()
@@ -155,7 +155,7 @@ func ResourceCredential() common.Resource {
return err
}
// Bind the current workspace if the credential is isolated, otherwise the read will fail
return bindings.AddCurrentWorkspaceBindings(ctx, d, w, updateCredRequest.NameArg, catalog.UpdateBindingsSecurableTypeServiceCredential)
return bindings.AddCurrentWorkspaceBindings(ctx, d, w, updateCredRequest.NameArg, catalog.UpdateBindingsSecurableTypeCredential)
},
Delete: func(ctx context.Context, d *schema.ResourceData, c *common.DatabricksClient) error {
force := d.Get("force_destroy").(bool)
2 changes: 1 addition & 1 deletion catalog/resource_credential_test.go
@@ -110,7 +110,7 @@ func TestCreateIsolatedCredential(t *testing.T) {
}, nil)
w.GetMockWorkspaceBindingsAPI().EXPECT().UpdateBindings(mock.Anything, catalog.UpdateWorkspaceBindingsParameters{
SecurableName: "a",
SecurableType: catalog.UpdateBindingsSecurableTypeServiceCredential,
SecurableType: catalog.UpdateBindingsSecurableTypeCredential,
Add: []catalog.WorkspaceBinding{
{
WorkspaceId: int64(123456789101112),
13 changes: 8 additions & 5 deletions catalog/resource_workspace_binding.go
@@ -43,11 +43,14 @@ func ResourceWorkspaceBinding() common.Resource {
Optional: true,
Default: "catalog",
}
common.CustomizeSchemaPath(m, "securable_type").SetValidateFunc(validation.StringInSlice([]string{"catalog", "external_location", "storage_credential"}, false))
common.CustomizeSchemaPath(m, "binding_type").SetDefault(catalog.WorkspaceBindingBindingTypeBindingTypeReadWrite).SetValidateFunc(validation.StringInSlice([]string{
string(catalog.WorkspaceBindingBindingTypeBindingTypeReadWrite),
string(catalog.WorkspaceBindingBindingTypeBindingTypeReadOnly),
}, false))
common.CustomizeSchemaPath(m, "securable_type").SetValidateFunc(validation.StringInSlice([]string{
"catalog", "external_location", "storage_credential", "credential"}, false))
common.CustomizeSchemaPath(m, "binding_type").SetDefault(
catalog.WorkspaceBindingBindingTypeBindingTypeReadWrite).SetValidateFunc(
validation.StringInSlice([]string{
string(catalog.WorkspaceBindingBindingTypeBindingTypeReadWrite),
string(catalog.WorkspaceBindingBindingTypeBindingTypeReadOnly),
}, false))
return m
},
)
2 changes: 1 addition & 1 deletion common/version.go
@@ -3,7 +3,7 @@ package common
import "context"

var (
version = "1.61.0"
version = "1.62.0"
// ResourceName is resource name without databricks_ prefix
ResourceName contextKey = 1
// Provider is the current instance of provider
4 changes: 2 additions & 2 deletions docs/resources/workspace_binding.md
@@ -9,7 +9,7 @@ If you use workspaces to isolate user data access, you may want to limit access

By default, Databricks assigns the securable to all workspaces attached to the current metastore. By using `databricks_workspace_binding`, the securable will be unassigned from all workspaces and only assigned explicitly using this resource.

-> To use this resource, the securable must have its `isolation_mode` attribute set to `ISOLATED` (for [databricks_catalog](catalog.md)) or `ISOLATION_MODE_ISOLATED` (for [databricks_external_location](external_location.md) or [databricks_storage_credential](storage_credential.md)). Alternatively, the isolation mode can be set using the UI or API by following [this guide](https://docs.databricks.com/data-governance/unity-catalog/create-catalogs.html#configuration), [this guide](https://docs.databricks.com/en/connect/unity-catalog/external-locations.html#workspace-binding) or [this guide](https://docs.databricks.com/en/connect/unity-catalog/storage-credentials.html#optional-assign-a-storage-credential-to-specific-workspaces).
-> To use this resource, the securable must have its `isolation_mode` attribute set to `ISOLATED` (for [databricks_catalog](catalog.md)) or `ISOLATION_MODE_ISOLATED` (for [databricks_external_location](external_location.md), [databricks_storage_credential](storage_credential.md) or [databricks_credential](credential.md)). Alternatively, the isolation mode can be set using the UI or API by following [this guide](https://docs.databricks.com/data-governance/unity-catalog/create-catalogs.html#configuration), [this guide](https://docs.databricks.com/en/connect/unity-catalog/external-locations.html#workspace-binding) or [this guide](https://docs.databricks.com/en/connect/unity-catalog/storage-credentials.html#optional-assign-a-storage-credential-to-specific-workspaces).

-> If the securable's isolation mode was set to `ISOLATED` using Terraform then the securable will have been automatically bound to the workspace it was created from.

@@ -33,7 +33,7 @@ The following arguments are required:

* `workspace_id` - ID of the workspace. Change forces creation of a new resource.
* `securable_name` - Name of securable. Change forces creation of a new resource.
* `securable_type` - Type of securable. Can be `catalog`, `external-location` or `storage-credential`. Defaults to `catalog`. Change forces creation of a new resource.
* `securable_type` - Type of securable. Can be `catalog`, `external_location`, `storage_credential` or `credential`. Defaults to `catalog`. Change forces creation of a new resource.
* `binding_type` - (Optional) Binding mode. Defaults to `BINDING_TYPE_READ_WRITE`. Possible values are `BINDING_TYPE_READ_ONLY` and `BINDING_TYPE_READ_WRITE`.
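For the new `credential` securable type, a minimal usage sketch is shown below. It is adapted from the acceptance test added in this commit (`internal/acceptance/workspace_binding_test.go`); the IAM role ARN and workspace ID are placeholders.

```hcl
# Minimal sketch: bind an isolated service credential to a single workspace.
# Adapted from the acceptance test in this commit; the role ARN and
# workspace ID are placeholders.
resource "databricks_credential" "this" {
  name            = "service-cred-example"
  purpose         = "SERVICE"
  skip_validation = true
  isolation_mode  = "ISOLATION_MODE_ISOLATED"

  aws_iam_role {
    role_arn = "arn:aws:iam::123456789012:role/example-role" # placeholder
  }
}

resource "databricks_workspace_binding" "cred" {
  securable_name = databricks_credential.this.id
  securable_type = "credential"
  workspace_id   = 1234567890123456 # placeholder workspace ID
}
```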

## Import
33 changes: 24 additions & 9 deletions exporter/importables.go
@@ -250,7 +250,8 @@ var resourcesMap map[string]importable = map[string]importable{
Ignore: generateIgnoreObjectWithEmptyAttributeValue("databricks_instance_pool", "instance_pool_name"),
},
"databricks_instance_profile": {
Service: "access",
Service: "access",
WorkspaceLevel: true,
Name: func(ic *importContext, d *schema.ResourceData) string {
arn := d.Get("instance_profile_arn").(string)
splits := strings.Split(arn, "/")
@@ -774,12 +775,8 @@ var resourcesMap map[string]importable = map[string]importable{
return d.Get("name").(string)
},
List: func(ic *importContext) error {
w, err := ic.Client.WorkspaceClient()
if err != nil {
return err
}
builtInClusterPolicies := ic.getBuiltinPolicyFamilies()
it := w.ClusterPolicies.List(ic.Context, compute.ListClusterPoliciesRequest{})
it := ic.workspaceClient.ClusterPolicies.List(ic.Context, compute.ListClusterPoliciesRequest{})
i := 0
for it.HasNext(ic.Context) {
policy, err := it.Next(ic.Context)
@@ -2390,6 +2387,7 @@ var resourcesMap map[string]importable = map[string]importable{
}
if se.ExternalModel.OpenaiConfig != nil {
ic.emitSecretsFromSecretPathString(se.ExternalModel.OpenaiConfig.OpenaiApiKey)
ic.emitSecretsFromSecretPathString(se.ExternalModel.OpenaiConfig.MicrosoftEntraClientSecret)
}
if se.ExternalModel.PalmConfig != nil {
ic.emitSecretsFromSecretPathString(se.ExternalModel.PalmConfig.PalmApiKey)
@@ -2404,6 +2402,15 @@ var resourcesMap map[string]importable = map[string]importable{
ID: mse.Config.AutoCaptureConfig.CatalogName + "." + mse.Config.AutoCaptureConfig.SchemaName,
})
}
// TODO: add auto-capture for AI Gateway
if mse.AiGateway != nil && mse.AiGateway.InferenceTableConfig != nil &&
mse.AiGateway.InferenceTableConfig.CatalogName != "" &&
mse.AiGateway.InferenceTableConfig.SchemaName != "" {
ic.Emit(&resource{
Resource: "databricks_schema",
ID: mse.AiGateway.InferenceTableConfig.CatalogName + "." + mse.AiGateway.InferenceTableConfig.SchemaName,
})
}
return nil
},
Ignore: func(ic *importContext, r *resource) bool {
@@ -2441,14 +2448,22 @@ var resourcesMap map[string]importable = map[string]importable{
extModelBlockCoordinate := strings.Replace(pathString, ".scale_to_zero_enabled", ".external_model", 1)
return d.Get(extModelBlockCoordinate+".#").(int) == 0
}
return pathString == "config.0.auto_capture_config.0.enabled"
return pathString == "config.0.auto_capture_config.0.enabled" || pathString == "ai_gateway.0.inference_table_config.0.enabled"
},
Depends: []reference{
{Path: "config.served_entities.entity_name", Resource: "databricks_registered_model"},
{Path: "config.served_entities.instance_profile_arn", Resource: "databricks_instance_profile",
Match: "instance_profile_arn"},
{Path: "config.auto_capture_config.catalog_name", Resource: "databricks_catalog"},
{Path: "config.auto_capture_config.schema_name", Resource: "databricks_schema", Match: "name",
IsValidApproximation: createIsMatchingCatalogAndSchema("config.0.auto_capture_config.0.catalog_name", "config.0.auto_capture_config.0.schema_name"),
SkipDirectLookup: true},
IsValidApproximation: createIsMatchingCatalogAndSchema("config.0.auto_capture_config.0.catalog_name",
"config.0.auto_capture_config.0.schema_name"),
SkipDirectLookup: true},
{Path: "ai_gateway.inference_table_config.catalog_name", Resource: "databricks_catalog"},
{Path: "ai_gateway.inference_table_config.schema_name", Resource: "databricks_schema", Match: "name",
IsValidApproximation: createIsMatchingCatalogAndSchema("ai_gateway.0.inference_table_config.0.catalog_name",
"ai_gateway.0.inference_table_config.0.schema_name"),
SkipDirectLookup: true},
},
},
"databricks_mlflow_webhook": {
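The exporter changes above make `databricks_model_serving` emit the `databricks_catalog` and `databricks_schema` referenced by an AI Gateway inference-table configuration, alongside the existing auto-capture handling. A rough sketch of the kind of endpoint configuration this covers is below; it is illustrative only, with the attribute names taken from the reference paths in this diff and all values being placeholders.

```hcl
# Illustrative sketch only (not part of this commit). Attribute names follow
# the reference paths used in importables.go; entity details are placeholders.
resource "databricks_model_serving" "this" {
  name = "example-endpoint"

  config {
    served_entities {
      entity_name           = "main.models.example_model" # placeholder UC model
      entity_version        = "1"
      workload_size         = "Small"
      scale_to_zero_enabled = true
    }
  }

  ai_gateway {
    inference_table_config {
      enabled      = true
      catalog_name = "main"       # exporter now emits this databricks_catalog
      schema_name  = "ai_gateway" # exporter now emits this databricks_schema
    }
  }
}
```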
2 changes: 1 addition & 1 deletion go.mod
@@ -5,7 +5,7 @@ go 1.22.0
toolchain go1.22.5

require (
github.com/databricks/databricks-sdk-go v0.53.0
github.com/databricks/databricks-sdk-go v0.54.0
github.com/golang-jwt/jwt/v4 v4.5.1
github.com/hashicorp/go-cty v1.4.1-0.20200414143053-d3edf31b6320
github.com/hashicorp/hcl v1.0.0
2 changes: 2 additions & 0 deletions go.sum
@@ -28,6 +28,8 @@ github.com/cyphar/filepath-securejoin v0.2.4 h1:Ugdm7cg7i6ZK6x3xDF1oEu1nfkyfH53E
github.com/cyphar/filepath-securejoin v0.2.4/go.mod h1:aPGpWjXOXUn2NCNjFvBE6aRxGGx79pTxQpKOJNYHHl4=
github.com/databricks/databricks-sdk-go v0.53.0 h1:rZMXaTC3HNKZt+m4C4I/dY3EdZj+kl/sVd/Kdq55Qfo=
github.com/databricks/databricks-sdk-go v0.53.0/go.mod h1:ds+zbv5mlQG7nFEU5ojLtgN/u0/9YzZmKQES/CfedzU=
github.com/databricks/databricks-sdk-go v0.54.0 h1:L8gsA3NXs+uYU3QtW/OUgjxMQxOH24k0MT9JhB3zLlM=
github.com/databricks/databricks-sdk-go v0.54.0/go.mod h1:ds+zbv5mlQG7nFEU5ojLtgN/u0/9YzZmKQES/CfedzU=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
28 changes: 22 additions & 6 deletions internal/acceptance/workspace_binding_test.go
@@ -30,6 +30,16 @@ func workspaceBindingTemplateWithWorkspaceId(workspaceId string) string {
}
isolation_mode = "ISOLATION_MODE_ISOLATED"
}
resource "databricks_credential" "credential" {
name = "service-cred-{var.RANDOM}"
aws_iam_role {
role_arn = "{env.TEST_METASTORE_DATA_ACCESS_ARN}"
}
purpose = "SERVICE"
skip_validation = true
isolation_mode = "ISOLATION_MODE_ISOLATED"
}
resource "databricks_external_location" "some" {
name = "external-{var.RANDOM}"
@@ -40,28 +50,34 @@
resource "databricks_workspace_binding" "dev" {
catalog_name = databricks_catalog.dev.name
workspace_id = %s
workspace_id = %[1]s
}
resource "databricks_workspace_binding" "prod" {
securable_name = databricks_catalog.prod.name
securable_type = "catalog"
workspace_id = %s
workspace_id = %[1]s
binding_type = "BINDING_TYPE_READ_ONLY"
}
resource "databricks_workspace_binding" "ext" {
securable_name = databricks_external_location.some.id
securable_type = "external_location"
workspace_id = %s
workspace_id = %[1]s
}
resource "databricks_workspace_binding" "cred" {
securable_name = databricks_storage_credential.external.id
securable_type = "storage_credential"
workspace_id = %s
}
`, workspaceId, workspaceId, workspaceId, workspaceId)
workspace_id = %[1]s
}
resource "databricks_workspace_binding" "service_cred" {
securable_name = databricks_credential.credential.id
securable_type = "credential"
workspace_id = %[1]s
}
`, workspaceId)
}

func TestUcAccWorkspaceBindingToOtherWorkspace(t *testing.T) {
(Diffs for the remaining changed files are not shown here.)
