diff --git a/catalog/permissions/permissions.go b/catalog/permissions/permissions.go index ff6b453793..2879a8d899 100644 --- a/catalog/permissions/permissions.go +++ b/catalog/permissions/permissions.go @@ -108,6 +108,7 @@ func (sm SecurableMapping) Id(d *schema.ResourceData) string { // Omitting provider as a reserved keyword var Mappings = SecurableMapping{ "catalog": catalog.SecurableType("catalog"), + "credential": catalog.SecurableType("credential"), "foreign_connection": catalog.SecurableType("connection"), "external_location": catalog.SecurableType("external_location"), "function": catalog.SecurableType("function"), diff --git a/catalog/resource_credential.go b/catalog/resource_credential.go new file mode 100644 index 0000000000..f0dc62de46 --- /dev/null +++ b/catalog/resource_credential.go @@ -0,0 +1,159 @@ +package catalog + +import ( + "context" + + "github.com/databricks/databricks-sdk-go/service/catalog" + "github.com/databricks/terraform-provider-databricks/catalog/bindings" + "github.com/databricks/terraform-provider-databricks/common" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" +) + +var credentialSchema = common.StructToSchema(catalog.CredentialInfo{}, + func(m map[string]*schema.Schema) map[string]*schema.Schema { + var alofServiceCreds = []string{"aws_iam_role", "azure_managed_identity", "azure_service_principal"} + for _, cred := range alofServiceCreds { + common.CustomizeSchemaPath(m, cred).SetExactlyOneOf(alofServiceCreds) + } + + for _, required := range []string{"name", "purpose"} { + common.CustomizeSchemaPath(m, required).SetRequired() + } + + for _, computed := range []string{"id", "created_at", "created_by", "full_name", "isolation_mode", + "metastore_id", "owner", "updated_at", "updated_by", "used_for_managed_storage"} { + common.CustomizeSchemaPath(m, computed).SetComputed() + } + + common.MustSchemaPath(m, "aws_iam_role", "external_id").Computed = true + common.MustSchemaPath(m, "aws_iam_role", "unity_catalog_iam_arn").Computed = true + common.MustSchemaPath(m, "azure_managed_identity", "credential_id").Computed = true + common.MustSchemaPath(m, "azure_service_principal", "client_secret").Sensitive = true + + m["force_destroy"] = &schema.Schema{ + Type: schema.TypeBool, + Optional: true, + } + m["force_update"] = &schema.Schema{ + Type: schema.TypeBool, + Optional: true, + } + m["skip_validation"] = &schema.Schema{ + Type: schema.TypeBool, + Optional: true, + DiffSuppressFunc: func(k, old, new string, d *schema.ResourceData) bool { + return old == "false" && new == "true" + }, + } + m["credential_id"] = &schema.Schema{ + Type: schema.TypeString, + Computed: true, + } + m["name"].DiffSuppressFunc = common.EqualFoldDiffSuppress + return m + }) + +func ResourceCredential() common.Resource { + return common.Resource{ + Schema: credentialSchema, + Create: func(ctx context.Context, d *schema.ResourceData, c *common.DatabricksClient) error { + w, err := c.WorkspaceClient() + if err != nil { + return err + } + var create catalog.CreateCredentialRequest + common.DataToStructPointer(d, credentialSchema, &create) + cred, err := w.Credentials.CreateCredential(ctx, create) + if err != nil { + return err + } + d.SetId(cred.Name) + + // Update owner or isolation mode if it is provided + if !updateRequired(d, []string{"owner", "isolation_mode"}) { + return nil + } + + var update catalog.UpdateCredentialRequest + common.DataToStructPointer(d, credentialSchema, &update) + update.NameArg = d.Id() + _, err = w.Credentials.UpdateCredential(ctx, update) + if err 
!= nil { + return err + } + + // Bind the current workspace if the credential is isolated, otherwise the read will fail + return bindings.AddCurrentWorkspaceBindings(ctx, d, w, cred.Name, catalog.UpdateBindingsSecurableTypeServiceCredential) + }, + Read: func(ctx context.Context, d *schema.ResourceData, c *common.DatabricksClient) error { + w, err := c.WorkspaceClient() + if err != nil { + return err + } + cred, err := w.Credentials.GetCredentialByNameArg(ctx, d.Id()) + if err != nil { + return err + } + d.Set("credential_id", cred.Id) + return common.StructToData(cred, credentialSchema, d) + }, + Update: func(ctx context.Context, d *schema.ResourceData, c *common.DatabricksClient) error { + force := d.Get("force_update").(bool) + w, err := c.WorkspaceClient() + if err != nil { + return err + } + var updateCredRequest catalog.UpdateCredentialRequest + common.DataToStructPointer(d, credentialSchema, &updateCredRequest) + updateCredRequest.NameArg = d.Id() + updateCredRequest.Force = force + + if d.HasChange("owner") { + _, err = w.Credentials.UpdateCredential(ctx, catalog.UpdateCredentialRequest{ + NameArg: updateCredRequest.NameArg, + Owner: updateCredRequest.Owner, + }) + if err != nil { + return err + } + } + + if !d.HasChangeExcept("owner") { + return nil + } + if d.HasChange("read_only") { + updateCredRequest.ForceSendFields = append(updateCredRequest.ForceSendFields, "ReadOnly") + } + + updateCredRequest.Owner = "" + _, err = w.Credentials.UpdateCredential(ctx, updateCredRequest) + if err != nil { + if d.HasChange("owner") { + // Rollback + old, new := d.GetChange("owner") + _, rollbackErr := w.Credentials.UpdateCredential(ctx, catalog.UpdateCredentialRequest{ + NameArg: updateCredRequest.NameArg, + Owner: old.(string), + }) + if rollbackErr != nil { + return common.OwnerRollbackError(err, rollbackErr, old.(string), new.(string)) + } + } + return err + } + // Bind the current workspace if the credential is isolated, otherwise the read will fail + return bindings.AddCurrentWorkspaceBindings(ctx, d, w, updateCredRequest.NameArg, catalog.UpdateBindingsSecurableTypeServiceCredential) + }, + Delete: func(ctx context.Context, d *schema.ResourceData, c *common.DatabricksClient) error { + force := d.Get("force_destroy").(bool) + w, err := c.WorkspaceClient() + if err != nil { + return err + } + return w.Credentials.DeleteCredential(ctx, catalog.DeleteCredentialRequest{ + NameArg: d.Id(), + Force: force, + }) + }, + } +} diff --git a/catalog/resource_credential_test.go b/catalog/resource_credential_test.go new file mode 100644 index 0000000000..c8f4573d4f --- /dev/null +++ b/catalog/resource_credential_test.go @@ -0,0 +1,158 @@ +package catalog + +import ( + "testing" + + "github.com/databricks/databricks-sdk-go/experimental/mocks" + "github.com/databricks/databricks-sdk-go/service/catalog" + "github.com/databricks/terraform-provider-databricks/qa" + "github.com/stretchr/testify/mock" +) + +func TestCredentialsCornerCases(t *testing.T) { + qa.ResourceCornerCases(t, ResourceCredential()) +} + +func TestCreateCredential(t *testing.T) { + qa.ResourceFixture{ + MockWorkspaceClientFunc: func(w *mocks.MockWorkspaceClient) { + e := w.GetMockCredentialsAPI().EXPECT() + e.CreateCredential(mock.Anything, catalog.CreateCredentialRequest{ + Name: "a", + AwsIamRole: &catalog.AwsIamRole{ + RoleArn: "def", + }, + Comment: "c", + Purpose: "SERVICE", + }).Return(&catalog.CredentialInfo{ + Name: "a", + AwsIamRole: &catalog.AwsIamRole{ + RoleArn: "def", + }, + Purpose: "SERVICE", + Comment: "c", + }, nil) + 
e.GetCredentialByNameArg(mock.Anything, "a").Return(&catalog.CredentialInfo{ + Name: "a", + AwsIamRole: &catalog.AwsIamRole{ + RoleArn: "def", + ExternalId: "123", + }, + Purpose: "SERVICE", + MetastoreId: "d", + Id: "1234-5678", + Owner: "f", + IsolationMode: "ISOLATION_MODE_ISOLATED", + }, nil) + }, + Resource: ResourceCredential(), + Create: true, + HCL: ` + name = "a" + aws_iam_role { + role_arn = "def" + } + purpose = "SERVICE" + comment = "c" + `, + }.ApplyAndExpectData(t, map[string]any{ + "aws_iam_role.0.external_id": "123", + "aws_iam_role.0.role_arn": "def", + "name": "a", + "purpose": "SERVICE", + }) +} + +func TestCreateIsolatedCredential(t *testing.T) { + qa.ResourceFixture{ + MockWorkspaceClientFunc: func(w *mocks.MockWorkspaceClient) { + e := w.GetMockCredentialsAPI().EXPECT() + e.CreateCredential(mock.Anything, catalog.CreateCredentialRequest{ + Name: "a", + AwsIamRole: &catalog.AwsIamRole{ + RoleArn: "def", + }, + Comment: "c", + Purpose: "SERVICE", + }).Return(&catalog.CredentialInfo{ + Name: "a", + AwsIamRole: &catalog.AwsIamRole{ + RoleArn: "def", + ExternalId: "123", + }, + Purpose: "SERVICE", + MetastoreId: "d", + Id: "1234-5678", + Owner: "f", + }, nil) + e.UpdateCredential(mock.Anything, catalog.UpdateCredentialRequest{ + NameArg: "a", + AwsIamRole: &catalog.AwsIamRole{ + RoleArn: "def", + }, + Comment: "c", + IsolationMode: "ISOLATION_MODE_ISOLATED", + }).Return(&catalog.CredentialInfo{ + Name: "a", + AwsIamRole: &catalog.AwsIamRole{ + RoleArn: "def", + ExternalId: "123", + }, + Purpose: "SERVICE", + MetastoreId: "d", + Id: "1234-5678", + Owner: "f", + IsolationMode: "ISOLATION_MODE_ISOLATED", + }, nil) + w.GetMockMetastoresAPI().EXPECT().Current(mock.Anything).Return(&catalog.MetastoreAssignment{ + MetastoreId: "e", + WorkspaceId: 123456789101112, + }, nil) + w.GetMockWorkspaceBindingsAPI().EXPECT().UpdateBindings(mock.Anything, catalog.UpdateWorkspaceBindingsParameters{ + SecurableName: "a", + SecurableType: catalog.UpdateBindingsSecurableTypeServiceCredential, + Add: []catalog.WorkspaceBinding{ + { + WorkspaceId: int64(123456789101112), + BindingType: catalog.WorkspaceBindingBindingTypeBindingTypeReadWrite, + }, + }, + }).Return(&catalog.WorkspaceBindingsResponse{ + Bindings: []catalog.WorkspaceBinding{ + { + WorkspaceId: int64(123456789101112), + BindingType: catalog.WorkspaceBindingBindingTypeBindingTypeReadWrite, + }, + }, + }, nil) + e.GetCredentialByNameArg(mock.Anything, "a").Return(&catalog.CredentialInfo{ + Name: "a", + AwsIamRole: &catalog.AwsIamRole{ + RoleArn: "def", + ExternalId: "123", + }, + Purpose: "SERVICE", + MetastoreId: "d", + Id: "1234-5678", + Owner: "f", + IsolationMode: "ISOLATION_MODE_ISOLATED", + }, nil) + }, + Resource: ResourceCredential(), + Create: true, + HCL: ` + name = "a" + aws_iam_role { + role_arn = "def" + } + comment = "c" + purpose = "SERVICE" + isolation_mode = "ISOLATION_MODE_ISOLATED" + `, + }.ApplyAndExpectData(t, map[string]any{ + "aws_iam_role.0.external_id": "123", + "aws_iam_role.0.role_arn": "def", + "name": "a", + "isolation_mode": "ISOLATION_MODE_ISOLATED", + }) +} diff --git a/catalog/resource_grant_test.go b/catalog/resource_grant_test.go index 3c745438ea..d9a9cd4a31 100644 --- a/catalog/resource_grant_test.go +++ b/catalog/resource_grant_test.go @@ -462,7 +462,7 @@ func TestResourceGrantCreateNoSecurable(t *testing.T) { principal = "me" privileges = ["MODIFY", "SELECT"] `, - }.ExpectError(t, "invalid config supplied. [catalog] Missing required argument. [external_location] Missing required argument. 
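+
+// Illustrative sketch only, not part of the change above: a minimal delete-path
+// test, assuming the same qa fixture and mock conventions used in this file.
+// The request shape mirrors ResourceCredential's Delete implementation
+// (Force stays false because force_destroy is unset).
+func TestDeleteCredential(t *testing.T) {
+	qa.ResourceFixture{
+		MockWorkspaceClientFunc: func(w *mocks.MockWorkspaceClient) {
+			w.GetMockCredentialsAPI().EXPECT().
+				DeleteCredential(mock.Anything, catalog.DeleteCredentialRequest{
+					NameArg: "a",
+				}).Return(nil)
+		},
+		Resource: ResourceCredential(),
+		Delete:   true,
+		ID:       "a",
+	}.ApplyNoError(t)
+}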
[foreign_connection] Missing required argument. [function] Missing required argument. [metastore] Missing required argument. [model] Missing required argument. [pipeline] Missing required argument. [recipient] Missing required argument. [schema] Missing required argument. [share] Missing required argument. [storage_credential] Missing required argument. [table] Missing required argument. [volume] Missing required argument") + }.ExpectError(t, "invalid config supplied. [catalog] Missing required argument. [credential] Missing required argument. [external_location] Missing required argument. [foreign_connection] Missing required argument. [function] Missing required argument. [metastore] Missing required argument. [model] Missing required argument. [pipeline] Missing required argument. [recipient] Missing required argument. [schema] Missing required argument. [share] Missing required argument. [storage_credential] Missing required argument. [table] Missing required argument. [volume] Missing required argument") } func TestResourceGrantCreateOneSecurableOnly(t *testing.T) { diff --git a/docs/resources/credential.md b/docs/resources/credential.md new file mode 100644 index 0000000000..e612e9f89e --- /dev/null +++ b/docs/resources/credential.md @@ -0,0 +1,103 @@ +--- +subcategory: "Unity Catalog" +--- +# databricks_credential Resource + +-> This resource can only be used with a workspace-level provider. + +-> This feature is in [Public Preview](https://docs.databricks.com/release-notes/release-types.html). + +A credential represents an authentication and authorization mechanism for accessing services on your cloud tenant. Each credential is subject to Unity Catalog access-control policies that control which users and groups can access the credential. + +The type of credential to be created is determined by the `purpose` field, which should be either `SERVICE` or `STORAGE`. +The caller must be a metastore admin or have the metastore privilege `CREATE_STORAGE_CREDENTIAL` for storage credentials, or `CREATE_SERVICE_CREDENTIAL` for service credentials. The user who creates the credential can delegate ownership to another user or group to manage permissions on it. + +On AWS, the IAM role for a credential requires a trust policy. See [documentation](https://docs.databricks.com/en/connect/unity-catalog/cloud-services/service-credentials.html#step-1-create-an-iam-role) for more details. The data source [databricks_aws_unity_catalog_assume_role_policy](../data-sources/aws_unity_catalog_assume_role_policy.md) can be used to create the necessary AWS Unity Catalog assume role policy.
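+
+A minimal illustrative sketch of wiring that data source into the `aws_iam_role.external_data_access` role referenced in the examples below; the account ID and external ID are placeholder values, and `aws_iam_role` is the AWS provider's resource:
+
+```hcl
+data "databricks_aws_unity_catalog_assume_role_policy" "this" {
+  aws_account_id = "123456789012"         # placeholder AWS account ID
+  role_name      = "external-data-access" # must match the IAM role created below
+  external_id    = "12345678-1234-1234-1234-1234567890ab" # placeholder external ID
+}
+
+resource "aws_iam_role" "external_data_access" {
+  name               = "external-data-access"
+  assume_role_policy = data.databricks_aws_unity_catalog_assume_role_policy.this.json
+}
+```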
+ +## Example Usage + +For AWS + +```hcl +resource "databricks_credential" "external" { + name = aws_iam_role.external_data_access.name + aws_iam_role { + role_arn = aws_iam_role.external_data_access.arn + } + purpose = "SERVICE" + comment = "Managed by TF" +} + +resource "databricks_grants" "external_creds" { + credential = databricks_credential.external.id + grant { + principal = "Data Engineers" + privileges = ["ACCESS"] + } +} +``` + +For Azure + +```hcl +resource "databricks_credential" "external_mi" { + name = "mi_credential" + azure_managed_identity { + access_connector_id = azurerm_databricks_access_connector.example.id + } + purpose = "SERVICE" + comment = "Managed identity credential managed by TF" +} + +resource "databricks_grants" "external_creds" { + credential = databricks_credential.external_mi.id + grant { + principal = "Data Engineers" + privileges = ["ACCESS"] + } +} +``` + +## Argument Reference + +The following arguments are required: + +- `name` - Name of the credential, which must be unique within the [databricks_metastore](metastore.md). Change forces creation of a new resource. +- `purpose` - Indicates the purpose of the credential. Can be `SERVICE` or `STORAGE`. +- `owner` - (Optional) Username/groupname/sp application_id of the credential owner. +- `read_only` - (Optional) Indicates whether the credential is only usable for read operations. Only applicable when purpose is `STORAGE`. +- `skip_validation` - (Optional) Suppress validation errors if any and force save the credential. +- `force_destroy` - (Optional) Delete credential regardless of its dependencies. +- `force_update` - (Optional) Update credential regardless of its dependents. +- `isolation_mode` - (Optional) Whether the credential is accessible from all workspaces or a specific set of workspaces. Can be `ISOLATION_MODE_ISOLATED` or `ISOLATION_MODE_OPEN`. Setting the credential to `ISOLATION_MODE_ISOLATED` will automatically restrict access to the current workspace only. + +`aws_iam_role` optional configuration block for credential details for AWS: + +- `role_arn` - The Amazon Resource Name (ARN) of the AWS IAM role you want to use to set up the trust policy, of the form `arn:aws:iam::1234567890:role/MyRole-AJJHDSKSDF` + +`azure_managed_identity` optional configuration block for using managed identity as credential details for Azure (recommended over `azure_service_principal`): + +- `access_connector_id` - The Resource ID of the Azure Databricks Access Connector resource, of the form `/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg-name/providers/Microsoft.Databricks/accessConnectors/connector-name`. + +- `managed_identity_id` - (Optional) The Resource ID of the Azure User Assigned Managed Identity associated with Azure Databricks Access Connector, of the form `/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg-name/providers/Microsoft.ManagedIdentity/userAssignedIdentities/user-managed-identity-name`. + +`azure_service_principal` optional configuration block to use a service principal as credential details for Azure. Only applicable when purpose is `STORAGE` (Legacy): + +- `directory_id` - The directory ID corresponding to the Azure Active Directory (AAD) tenant of the application +- `application_id` - The application ID of the application registration within the referenced AAD tenant +- `client_secret` - The client secret generated for the above app ID in AAD.
**This field is redacted on output** + +## Attribute Reference + +In addition to all arguments above, the following attributes are exported: + +- `id` - ID of this credential - same as the `name`. +- `credential_id` - Unique ID of the credential. + +## Import + +This resource can be imported by name: + +```bash +terraform import databricks_credential.this <name> +``` diff --git a/docs/resources/grant.md b/docs/resources/grant.md index 9cd9bc0163..e5f562f7db 100644 --- a/docs/resources/grant.md +++ b/docs/resources/grant.md @@ -232,6 +232,28 @@ resource "databricks_grant" "udf_data_analysts" { } ``` +## Service credential grants + +See [databricks_grants Service credential grants](grants.md#service-credential-grants) for the list of privileges that apply to Service credentials. + +```hcl +resource "databricks_credential" "external" { + name = aws_iam_role.external_data_access.name + aws_iam_role { + role_arn = aws_iam_role.external_data_access.arn + } + purpose = "SERVICE" + comment = "Managed by TF" +} + +resource "databricks_grant" "external_creds" { + credential = databricks_credential.external.id + + principal = "Data Engineers" + privileges = ["ACCESS"] +} +``` + ## Storage credential grants See [databricks_grants Storage credential grants](grants.md#storage-credential-grants) for the list of privileges that apply to Storage credentials. diff --git a/docs/resources/grants.md b/docs/resources/grants.md index 7c1a223f3a..fc4b64687d 100644 --- a/docs/resources/grants.md +++ b/docs/resources/grants.md @@ -29,7 +29,7 @@ Unlike the [SQL specification](https://docs.databricks.com/sql/language-manual/s ## Metastore grants -You can grant `CREATE_CATALOG`, `CREATE_CONNECTION`, `CREATE_EXTERNAL_LOCATION`, `CREATE_PROVIDER`, `CREATE_RECIPIENT`, `CREATE_SHARE`, `CREATE_STORAGE_CREDENTIAL`, `MANAGE_ALLOWLIST`, `SET_SHARE_PERMISSION`, `USE_MARKETPLACE_ASSETS`, `USE_CONNECTION`, `USE_PROVIDER`, `USE_RECIPIENT` and `USE_SHARE` privileges to [databricks_metastore](metastore.md) assigned to the workspace. +You can grant `CREATE_CATALOG`, `CREATE_CLEAN_ROOM`, `CREATE_CONNECTION`, `CREATE_EXTERNAL_LOCATION`, `CREATE_PROVIDER`, `CREATE_RECIPIENT`, `CREATE_SHARE`, `CREATE_SERVICE_CREDENTIAL`, `CREATE_STORAGE_CREDENTIAL`, `MANAGE_ALLOWLIST`, `SET_SHARE_PERMISSION`, `USE_MARKETPLACE_ASSETS`, `USE_CONNECTION`, `USE_PROVIDER`, `USE_RECIPIENT`, and `USE_SHARE` privileges to [databricks_metastore](metastore.md) assigned to the workspace.
```hcl resource "databricks_grants" "sandbox" { @@ -230,6 +230,29 @@ resource "databricks_grants" "udf" { } ``` +## Service credential grants + +You can grant `ALL_PRIVILEGES`, `ACCESS` and `CREATE_CONNECTION` privileges to [databricks_credential](credential.md) id specified in `credential` attribute: + +```hcl +resource "databricks_credential" "external" { + name = aws_iam_role.external_data_access.name + aws_iam_role { + role_arn = aws_iam_role.external_data_access.arn + } + purpose = "SERVICE" + comment = "Managed by TF" +} + +resource "databricks_grants" "external_creds" { + credential = databricks_credential.external.id + grant { + principal = "Data Engineers" + privileges = ["CREATE_CONNECTION"] + } +} +``` + ## Storage credential grants You can grant `ALL_PRIVILEGES`, `CREATE_EXTERNAL_LOCATION`, `CREATE_EXTERNAL_TABLE`, `READ_FILES` and `WRITE_FILES` privileges to [databricks_storage_credential](storage_credential.md) id specified in `storage_credential` attribute: diff --git a/docs/resources/storage_credential.md b/docs/resources/storage_credential.md index 87d90b853b..8f9f63a842 100644 --- a/docs/resources/storage_credential.md +++ b/docs/resources/storage_credential.md @@ -10,6 +10,8 @@ To work with external tables, Unity Catalog introduces two new objects to access - `databricks_storage_credential` represents authentication methods to access cloud storage (e.g. an IAM role for Amazon S3 or a service principal/managed identity for Azure Storage). Storage credentials are access-controlled to determine which users can use the credential. - [databricks_external_location](external_location.md) are objects that combine a cloud storage path with a Storage Credential that can be used to access the location. +On AWS, the IAM role for a storage credential requires a trust policy. See [documentation](https://docs.databricks.com/en/connect/unity-catalog/cloud-storage/storage-credentials.html#step-1-create-an-iam-role) for more details. The data source [databricks_aws_unity_catalog_assume_role_policy](../data-sources/aws_unity_catalog_assume_role_policy.md) can be used to create the necessary AWS Unity Catalog assume role policy. 
+ ## Example Usage For AWS diff --git a/internal/acceptance/credential_test.go b/internal/acceptance/credential_test.go new file mode 100644 index 0000000000..73a2b15b31 --- /dev/null +++ b/internal/acceptance/credential_test.go @@ -0,0 +1,42 @@ +package acceptance + +import ( + "testing" +) + +func TestUcAccCredential(t *testing.T) { + loadUcwsEnv(t) + if isAws(t) { + UnityWorkspaceLevel(t, Step{ + Template: ` + resource "databricks_credential" "external" { + name = "cred-{var.RANDOM}" + aws_iam_role { + role_arn = "{env.TEST_METASTORE_DATA_ACCESS_ARN}" + } + purpose = "SERVICE" + skip_validation = true + comment = "Managed by TF" + }`, + }) + } +} + +func TestAccCredentialOwner(t *testing.T) { + UnityAccountLevel(t, Step{ + Template: ` + resource "databricks_service_principal" "test_acc_storage_credential_owner" { + display_name = "test_acc_storage_credential_owner {var.RANDOM}" + } + + resource "databricks_credential" "test_acc_storage_credential_owner" { + name = "test_acc_storage_credential_owner-{var.RANDOM}" + owner = databricks_service_principal.test_acc_storage_credential_owner.application_id + purpose = "SERVICE" + aws_iam_role { + role_arn = "{env.TEST_METASTORE_DATA_ACCESS_ARN}" + } + } + `, + }) +} diff --git a/internal/providers/sdkv2/sdkv2.go b/internal/providers/sdkv2/sdkv2.go index 1426b411c4..8d9e40aec7 100644 --- a/internal/providers/sdkv2/sdkv2.go +++ b/internal/providers/sdkv2/sdkv2.go @@ -141,6 +141,7 @@ func DatabricksProvider(sdkV2Fallbacks ...pluginfw.SdkV2FallbackOption) *schema. "databricks_budget": finops.ResourceBudget().ToResource(), "databricks_catalog": catalog.ResourceCatalog().ToResource(), "databricks_catalog_workspace_binding": catalog.ResourceCatalogWorkspaceBinding().ToResource(), + "databricks_credential": catalog.ResourceCredential().ToResource(), "databricks_custom_app_integration": apps.ResourceCustomAppIntegration().ToResource(), "databricks_connection": catalog.ResourceConnection().ToResource(), "databricks_cluster": clusters.ResourceCluster().ToResource(),