From 939cddee73dcd34a55c0557ccb707dbfef073113 Mon Sep 17 00:00:00 2001 From: Alex Ott Date: Fri, 17 May 2024 10:35:35 +0200 Subject: [PATCH] Fix documentation for `databricks_storage_credential` and `databricks_external_location` data sources Changes include: * Documented right structure returned by these data sources - the nested structures weren't mentioned there at all. * Fixed examples for both data sources * Added `id` attribute for easier reference to the data sources * Reformatted the rest of doc examples --- catalog/data_external_location.go | 2 + catalog/data_external_location_test.go | 5 +- catalog/data_storage_credential.go | 2 + catalog/data_storage_credential_test.go | 4 +- docs/data-sources/external_location.md | 29 +++++++---- docs/data-sources/mlflow_experiment.md | 2 +- docs/data-sources/storage_credential.md | 48 +++++++++---------- .../compliance_security_profile_setting.md | 4 +- .../enhanced_security_monitoring_setting.md | 2 +- 9 files changed, 55 insertions(+), 43 deletions(-) diff --git a/catalog/data_external_location.go b/catalog/data_external_location.go index acffd2a3c8..cc97683e02 100644 --- a/catalog/data_external_location.go +++ b/catalog/data_external_location.go @@ -10,6 +10,7 @@ import ( func DataSourceExternalLocation() common.Resource { type ExternalLocationByID struct { + Id string `json:"id,omitempty" tf:"computed"` Name string `json:"name"` ExternalLocation *catalog.ExternalLocationInfo `json:"external_location_info,omitempty" tf:"computed" ` } @@ -19,6 +20,7 @@ func DataSourceExternalLocation() common.Resource { return err } data.ExternalLocation = location + data.Id = location.Name return nil }) } diff --git a/catalog/data_external_location_test.go b/catalog/data_external_location_test.go index adf2199119..35543714c6 100644 --- a/catalog/data_external_location_test.go +++ b/catalog/data_external_location_test.go @@ -31,8 +31,9 @@ func TestExternalLocationDataVerify(t *testing.T) { name = "abc" `, }.ApplyAndExpectData(t, 
map[string]any{ - "external_location_info.0.owner": "admin", - "external_location_info.0.url": "s3://test", + "id": "abc", + "external_location_info.0.owner": "admin", + "external_location_info.0.url": "s3://test", "external_location_info.0.credential_name": "test", "external_location_info.0.read_only": true, }) diff --git a/catalog/data_storage_credential.go b/catalog/data_storage_credential.go index 2b54a85da2..f57a38f026 100644 --- a/catalog/data_storage_credential.go +++ b/catalog/data_storage_credential.go @@ -10,6 +10,7 @@ import ( func DataSourceStorageCredential() common.Resource { type AccountMetastoreByID struct { + Id string `json:"id,omitempty" tf:"computed"` Name string `json:"name"` StorageCredential *catalog.StorageCredentialInfo `json:"storage_credential_info,omitempty" tf:"computed" ` } @@ -19,6 +20,7 @@ func DataSourceStorageCredential() common.Resource { return err } data.StorageCredential = credential + data.Id = credential.Id return nil }) } diff --git a/catalog/data_storage_credential_test.go b/catalog/data_storage_credential_test.go index 56663f2157..551b385d27 100644 --- a/catalog/data_storage_credential_test.go +++ b/catalog/data_storage_credential_test.go @@ -15,6 +15,7 @@ func TestStorageCredentialDataVerify(t *testing.T) { e := w.GetMockStorageCredentialsAPI().EXPECT() e.GetByName(mock.Anything, "abc").Return( &catalog.StorageCredentialInfo{ + Id: "1234", Name: "abc", Owner: "admin", AwsIamRole: &catalog.AwsIamRoleResponse{ @@ -37,7 +38,8 @@ func TestStorageCredentialDataVerify(t *testing.T) { name = "abc" `, }.ApplyAndExpectData(t, map[string]any{ - "storage_credential_info.0.owner": "admin", + "id": "1234", + "storage_credential_info.0.owner": "admin", "storage_credential_info.0.aws_iam_role.0.role_arn": "test", "storage_credential_info.0.azure_managed_identity.0.access_connector_id": "test", "storage_credential_info.0.databricks_gcp_service_account.0.email": "test", diff --git a/docs/data-sources/external_location.md 
b/docs/data-sources/external_location.md index 20d4faf29e..78ecde2ca7 100644 --- a/docs/data-sources/external_location.md +++ b/docs/data-sources/external_location.md @@ -17,24 +17,33 @@ data "databricks_external_location" "this" { } output "created_by" { - value = data.databricks_external_location.this.created_by - sensitive = false + value = data.databricks_external_location.this.external_location_info[0].created_by } ``` ## Argument Reference -* `name` - (Required) The name of the storage credential +* `name` - (Required) The name of the external location ## Attribute Reference -* `url` - Path URL in cloud storage, of the form: `s3://[bucket-host]/[bucket-dir]` (AWS), `abfss://[user]@[host]/[path]` (Azure), `gs://[bucket-host]/[bucket-dir]` (GCP). -* `credential_name` - Name of the [databricks_storage_credential](storage_credential.md) to use with this external location. -* `owner` - Username/groupname/sp application_id of the external location owner. -* `comment` - User-supplied comment. -* `read_only` - Indicates whether the external location is read-only. -* `access_point` - The ARN of the s3 access point to use with the external location (AWS). -* `encryption_details` - The options for Server-Side Encryption to be used by each Databricks s3 client when connecting to S3 cloud storage (AWS). +This data source exports the following attributes: + +* `id` - external location ID - same as name. +* `external_location_info` - array of objects with information about external location: + * `url` - Path URL in cloud storage, of the form: `s3://[bucket-host]/[bucket-dir]` (AWS), `abfss://[user]@[host]/[path]` (Azure), `gs://[bucket-host]/[bucket-dir]` (GCP). + * `credential_name` - Name of the [databricks_storage_credential](storage_credential.md) to use with this external location. + * `credential_id` - Unique ID of storage credential. + * `metastore_id` - Unique identifier of the parent Metastore. 
+  * `owner` - Username/groupname/sp application_id of the external location owner.
+  * `comment` - User-supplied comment.
+  * `read_only` - Indicates whether the external location is read-only.
+  * `created_at` - Time at which this external location was created, in epoch milliseconds.
+  * `created_by` - Username of external location creator.
+  * `updated_at` - Time at which this external location was last modified, in epoch milliseconds.
+  * `updated_by` - Username of user who last modified the external location.
+  * `access_point` - The ARN of the s3 access point to use with the external location (AWS).
+  * `encryption_details` - The options for Server-Side Encryption to be used by each Databricks s3 client when connecting to S3 cloud storage (AWS).
 
 ## Related Resources
 
diff --git a/docs/data-sources/mlflow_experiment.md b/docs/data-sources/mlflow_experiment.md
index b8094b6e89..342b3e36aa 100644
--- a/docs/data-sources/mlflow_experiment.md
+++ b/docs/data-sources/mlflow_experiment.md
@@ -35,4 +35,4 @@ This data source exports the following attributes:
 * `last_update_time` - Last update time in unix time stamp.
 * `lifecycle_stage` - Current life cycle stage of the experiment: `active` or `deleted`.
 * `name` - Path to experiment.
-* `tags` - Additional metadata key-value pairs.
\ No newline at end of file
+* `tags` - Additional metadata key-value pairs.
diff --git a/docs/data-sources/storage_credential.md b/docs/data-sources/storage_credential.md
index 4f480b2080..631e264e66 100644
--- a/docs/data-sources/storage_credential.md
+++ b/docs/data-sources/storage_credential.md
@@ -17,8 +17,7 @@ data "databricks_storage_credential" "this" {
 }
 
 output "created_by" {
-  value     = data.databricks_storage_credential.this.created_by
-  sensitive = false
+  value = data.databricks_storage_credential.this.storage_credential_info[0].created_by
 }
 ```
 
@@ -30,30 +29,27 @@ output "created_by" {
 
 This data source exports the following attributes:
 
-* `metastore_id` - Unique identifier of the parent Metastore. 
-* `owner` - Username/groupname/sp application_id of the storage credential owner. -* `read_only` - Indicates whether the storage credential is only usable for read operations. - -`aws_iam_role` credential details for AWS: - -* `role_arn` - The Amazon Resource Name (ARN) of the AWS IAM role for S3 data access, of the form `arn:aws:iam::1234567890:role/MyRole-AJJHDSKSDF` -* `external_id` (output only) - The external ID used in role assumption to prevent confused deputy problem. -* `unity_catalog_iam_arn` (output only) - The Amazon Resource Name (ARN) of the AWS IAM user managed by Databricks. This is the identity that is going to assume the AWS IAM role. - -`azure_managed_identity` managed identity credential details for Azure - -* `access_connector_id` - The Resource ID of the Azure Databricks Access Connector resource, of the form `/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg-name/providers/Microsoft.Databricks/accessConnectors/connector-name`. - -* `managed_identity_id` - The Resource ID of the Azure User Assigned Managed Identity associated with Azure Databricks Access Connector, of the form `/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg-name/providers/Microsoft.ManagedIdentity/userAssignedIdentities/user-managed-identity-name`. - -`databricks_gcp_service_account` credential details for GCP: - -* `email` - The email of the GCP service account created, to be granted access to relevant buckets. - -`azure_service_principal` service principal credential details for Azure: - -* `directory_id` - The directory ID corresponding to the Azure Active Directory (AAD) tenant of the application -* `application_id` - The application ID of the application registration within the referenced AAD tenant +* `id` - Unique ID of storage credential. +* `storage_credential_info` - array of objects with information about storage credential. + * `metastore_id` - Unique identifier of the parent Metastore. 
+  * `owner` - Username/groupname/sp application_id of the storage credential owner.
+  * `read_only` - Indicates whether the storage credential is only usable for read operations.
+  * `created_at` - Time at which this storage credential was created, in epoch milliseconds.
+  * `created_by` - Username of storage credential creator.
+  * `updated_at` - Time at which this storage credential was last modified, in epoch milliseconds.
+  * `updated_by` - Username of user who last modified the storage credential.
+  * `aws_iam_role` credential details for AWS:
+    * `role_arn` - The Amazon Resource Name (ARN) of the AWS IAM role for S3 data access, of the form `arn:aws:iam::1234567890:role/MyRole-AJJHDSKSDF`
+    * `external_id` (output only) - The external ID used in role assumption to prevent confused deputy problem.
+    * `unity_catalog_iam_arn` (output only) - The Amazon Resource Name (ARN) of the AWS IAM user managed by Databricks. This is the identity that is going to assume the AWS IAM role.
+  * `azure_managed_identity` managed identity credential details for Azure
+    * `access_connector_id` - The Resource ID of the Azure Databricks Access Connector resource, of the form `/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg-name/providers/Microsoft.Databricks/accessConnectors/connector-name`.
+    * `managed_identity_id` - The Resource ID of the Azure User Assigned Managed Identity associated with Azure Databricks Access Connector, of the form `/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg-name/providers/Microsoft.ManagedIdentity/userAssignedIdentities/user-managed-identity-name`. 
+ * `azure_service_principal` service principal credential details for Azure: + * `directory_id` - The directory ID corresponding to the Azure Active Directory (AAD) tenant of the application + * `application_id` - The application ID of the application registration within the referenced AAD tenant + * `databricks_gcp_service_account` credential details for GCP: + * `email` - The email of the GCP service account created, to be granted access to relevant buckets. ## Related Resources diff --git a/docs/resources/compliance_security_profile_setting.md b/docs/resources/compliance_security_profile_setting.md index 7de715dec1..524f5673a2 100644 --- a/docs/resources/compliance_security_profile_setting.md +++ b/docs/resources/compliance_security_profile_setting.md @@ -15,7 +15,7 @@ turned off. This setting can NOT be disabled once it is enabled. ```hcl resource "databricks_compliance_security_profile_workspace_setting" "this" { compliance_security_profile_workspace { - is_enabled = true + is_enabled = true compliance_standards = ["HIPAA", "FEDRAMP_MODERATE"] } } @@ -34,4 +34,4 @@ This resource can be imported by predefined name `global`: ```bash terraform import databricks_compliance_security_profile_workspace_setting.this global -``` \ No newline at end of file +``` diff --git a/docs/resources/enhanced_security_monitoring_setting.md b/docs/resources/enhanced_security_monitoring_setting.md index c19a167680..ce8b4f25d9 100644 --- a/docs/resources/enhanced_security_monitoring_setting.md +++ b/docs/resources/enhanced_security_monitoring_setting.md @@ -33,4 +33,4 @@ This resource can be imported by predefined name `global`: ```bash terraform import databricks_enhanced_security_monitoring_workspace_setting.this global -``` \ No newline at end of file +```