diff --git a/docs/guides/unity-catalog.md b/docs/guides/unity-catalog.md
index 52c187ff7c..61b1bff7d4 100644
--- a/docs/guides/unity-catalog.md
+++ b/docs/guides/unity-catalog.md
@@ -262,7 +262,7 @@ resource "aws_iam_policy" "external_data_access" {
 resource "aws_iam_role" "external_data_access" {
   name                = local.uc_iam_role
-  assume_role_policy  = data.aws_iam_policy_document.this.json
+  assume_role_policy  = data.databricks_aws_unity_catalog_assume_role_policy.this.json
   managed_policy_arns = [aws_iam_policy.external_data_access.arn]
   tags = merge(var.tags, {
     Name = "${local.prefix}-unity-catalog external access IAM role"
diff --git a/docs/resources/alert.md b/docs/resources/alert.md
index f15bdaf116..478090892b 100644
--- a/docs/resources/alert.md
+++ b/docs/resources/alert.md
@@ -13,15 +13,15 @@ resource "databricks_directory" "shared_dir" {
 }
 
-# This will be replaced with new databricks_query resource
-resource "databricks_sql_query" "this" {
-  data_source_id = databricks_sql_endpoint.example.data_source_id
-  name           = "My Query Name"
-  query          = "SELECT 42 as value"
-  parent         = "folders/${databricks_directory.shared_dir.object_id}"
+# The new databricks_query resource replaces databricks_sql_query
+resource "databricks_query" "this" {
+  warehouse_id = databricks_sql_endpoint.example.id
+  display_name = "My Query Name"
+  query_text   = "SELECT 42 as value"
+  parent_path  = databricks_directory.shared_dir.path
 }
 
 resource "databricks_alert" "alert" {
-  query_id     = databricks_sql_query.this.id
+  query_id     = databricks_query.this.id
   display_name = "TF new alert"
   parent_path  = databricks_directory.shared_dir.path
   condition {
@@ -77,7 +77,11 @@ In addition to all the arguments above, the following attributes are exported:
 
 ## Migrating from `databricks_sql_alert` resource
 
-Under the hood, the new resource uses the same data as the `databricks_sql_alert`, but is exposed via a different API. This means that we can migrate existing alerts without recreating them. This operation is done in few steps:
+Under the hood, the new resource uses the same data as the `databricks_sql_alert`, but is exposed via a different API. This means that we can migrate existing alerts without recreating them.
+
+-> It's also recommended to migrate the alert's underlying query to the `databricks_query` resource - see [databricks_query](query.md) for more details.
+
+This operation is done in a few steps:
 
 * Record the ID of existing `databricks_sql_alert`, for example, by executing the `terraform state show databricks_sql_alert.alert` command.
 * Create the code for the new implementation by performing the following changes:
@@ -109,7 +113,7 @@ we'll have a new resource defined as:
 
 ```hcl
 resource "databricks_alert" "alert" {
-  query_id     = databricks_sql_query.this.id
+  query_id     = databricks_query.this.id
   display_name = "My Alert"
   parent_path  = databricks_directory.shared_dir.path
   condition {
@@ -179,6 +183,20 @@ resource "databricks_permissions" "alert_usage" {
 }
 ```
 
+## Access Control
+
+[databricks_permissions](permissions.md#sql-alert-usage) can control which groups or individual users can *Manage*, *Edit*, *Run* or *View* individual alerts.
+
+```hcl
+resource "databricks_permissions" "alert_usage" {
+  sql_alert_id = databricks_alert.alert.id
+  access_control {
+    group_name       = "users"
+    permission_level = "CAN_RUN"
+  }
+}
+```
+
 ## Import
 
 This resource can be imported using alert ID:
 
 ```bash
 terraform import databricks_alert.this
@@ -191,6 +209,6 @@
 
 The following resources are often used in the same context:
 
-* [databricks_sql_query](sql_query.md) to manage Databricks SQL [Queries](https://docs.databricks.com/sql/user/queries/index.html).
-* [databricks_sql_endpoint](sql_endpoint.md) to manage Databricks SQL [Endpoints](https://docs.databricks.com/sql/admin/sql-endpoints.html). +* [databricks_query](query.md) to manage [Databricks SQL Queries](https://docs.databricks.com/sql/user/queries/index.html). +* [databricks_sql_endpoint](sql_endpoint.md) to manage [Databricks SQL Endpoints](https://docs.databricks.com/sql/admin/sql-endpoints.html). * [databricks_directory](directory.md) to manage directories in [Databricks Workpace](https://docs.databricks.com/workspace/workspace-objects.html). diff --git a/docs/resources/custom_app_integration.md b/docs/resources/custom_app_integration.md index ffd2b79eb9..01b3c99ff4 100644 --- a/docs/resources/custom_app_integration.md +++ b/docs/resources/custom_app_integration.md @@ -15,7 +15,7 @@ resource "databricks_custom_app_integration" "this" { redirect_urls = ["https://example.com"] scopes = ["all-apis"] token_access_policy { - access_token_ttl_in_minutes = %s + access_token_ttl_in_minutes = 15 refresh_token_ttl_in_minutes = 30 } } diff --git a/docs/resources/job.md b/docs/resources/job.md index efc6bd8ca7..e239066a44 100644 --- a/docs/resources/job.md +++ b/docs/resources/job.md @@ -224,14 +224,14 @@ One of the `query`, `dashboard` or `alert` needs to be provided. * `warehouse_id` - (Required) ID of the (the [databricks_sql_endpoint](sql_endpoint.md)) that will be used to execute the task. Only Serverless & Pro warehouses are supported right now. * `parameters` - (Optional) (Map) parameters to be used for each run of this task. The SQL alert task does not support custom parameters. -* `query` - (Optional) block consisting of single string field: `query_id` - identifier of the Databricks SQL Query ([databricks_sql_query](sql_query.md)). +* `query` - (Optional) block consisting of single string field: `query_id` - identifier of the Databricks Query ([databricks_query](query.md)). * `dashboard` - (Optional) block consisting of following fields: * `dashboard_id` - (Required) (String) identifier of the Databricks SQL Dashboard [databricks_sql_dashboard](sql_dashboard.md). * `subscriptions` - (Optional) a list of subscription blocks consisting out of one of the required fields: `user_name` for user emails or `destination_id` - for Alert destination's identifier. * `custom_subject` - (Optional) string specifying a custom subject of email sent. * `pause_subscriptions` - (Optional) flag that specifies if subscriptions are paused or not. * `alert` - (Optional) block consisting of following fields: - * `alert_id` - (Required) (String) identifier of the Databricks SQL Alert. + * `alert_id` - (Required) (String) identifier of the Databricks Alert ([databricks_alert](alert.md)). * `subscriptions` - (Optional) a list of subscription blocks consisting out of one of the required fields: `user_name` for user emails or `destination_id` - for Alert destination's identifier. * `pause_subscriptions` - (Optional) flag that specifies if subscriptions are paused or not. * `file` - (Optional) block consisting of single string fields: @@ -372,7 +372,6 @@ This block describes the queue settings of the job: * `periodic` - (Optional) configuration block to define a trigger for Periodic Triggers consisting of the following attributes: * `interval` - (Required) Specifies the interval at which the job should run. This value is required. * `unit` - (Required) Options are {"DAYS", "HOURS", "WEEKS"}. 
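+
+As a worked example, a minimal sketch of a job with a periodic trigger (the job name and interval values are illustrative):
+
+```hcl
+resource "databricks_job" "this" {
+  name = "every-four-hours"
+
+  trigger {
+    # Run every 4 hours; `unit` accepts DAYS, HOURS, or WEEKS.
+    periodic {
+      interval = 4
+      unit     = "HOURS"
+    }
+  }
+}
+```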
-
 * `file_arrival` - (Optional) configuration block to define a trigger for [File Arrival events](https://learn.microsoft.com/en-us/azure/databricks/workflows/jobs/file-arrival-triggers) consisting of following attributes:
   * `url` - (Required) URL to be monitored for file arrivals. The path must point to the root or a subpath of the external location. Please note that the URL must have a trailing slash character (`/`).
   * `min_time_between_triggers_seconds` - (Optional) If set, the trigger starts a run only after the specified amount of time passed since the last time the trigger fired. The minimum allowed value is 60 seconds.
diff --git a/docs/resources/pipeline.md b/docs/resources/pipeline.md
index 76a60d75db..28ea211616 100644
--- a/docs/resources/pipeline.md
+++ b/docs/resources/pipeline.md
@@ -80,7 +80,8 @@ The following arguments are supported:
 * `photon` - A flag indicating whether to use Photon engine. The default value is `false`.
 * `serverless` - An optional flag indicating if serverless compute should be used for this DLT pipeline. Requires `catalog` to be set, as it could be used only with Unity Catalog.
 * `catalog` - The name of catalog in Unity Catalog. *Change of this parameter forces recreation of the pipeline.* (Conflicts with `storage`).
-* `target` - The name of a database (in either the Hive metastore or in a UC catalog) for persisting pipeline output data. Configuring the target setting allows you to view and query the pipeline output data from the Databricks UI.
+* `target` - (Optional, String, Conflicts with `schema`) The name of a database (in either the Hive metastore or in a UC catalog) for persisting pipeline output data. Configuring the target setting allows you to view and query the pipeline output data from the Databricks UI.
+* `schema` - (Optional, String, Conflicts with `target`) The default schema (database) where tables are read from or published to. The presence of this attribute implies that the pipeline is in direct publishing mode.
 * `edition` - optional name of the [product edition](https://docs.databricks.com/data-engineering/delta-live-tables/delta-live-tables-concepts.html#editions). Supported values are: `CORE`, `PRO`, `ADVANCED` (default). Not required when `serverless` is set to `true`.
 * `channel` - optional name of the release channel for Spark version used by DLT pipeline. Supported values are: `CURRENT` (default) and `PREVIEW`.
 * `budget_policy_id` - optional string specifying ID of the budget policy for this DLT pipeline.
diff --git a/docs/resources/query.md b/docs/resources/query.md
new file mode 100644
index 0000000000..cc8bc90edd
--- /dev/null
+++ b/docs/resources/query.md
@@ -0,0 +1,194 @@
+---
+subcategory: "Databricks SQL"
+---
+# databricks_query Resource
+
+This resource allows you to manage [Databricks SQL Queries](https://docs.databricks.com/en/sql/user/queries/index.html). It supersedes the [databricks_sql_query](sql_query.md) resource - see the migration guide below for more details.
+
+## Example Usage
+
+```hcl
+resource "databricks_directory" "shared_dir" {
+  path = "/Shared/Queries"
+}
+
+resource "databricks_query" "this" {
+  warehouse_id = databricks_sql_endpoint.example.id
+  display_name = "My Query Name"
+  query_text   = "SELECT 42 as value"
+  parent_path  = databricks_directory.shared_dir.path
+}
+```
+
+## Argument Reference
+
+The following arguments are available:
+
+* `query_text` - (Required, String) Text of SQL query.
+* `display_name` - (Required, String) Name of the query. +* `warehouse_id` - (Required, String) ID of a SQL warehouse which will be used to execute this query. +* `parent_path` - (Optional, String) The path to a workspace folder containing the query. The default is the user's home folder. If changed, the query will be recreated. +* `owner_user_name` - (Optional, String) Query owner's username. +* `apply_auto_limit` - (Optional, Boolean) Whether to apply a 1000 row limit to the query result. +* `catalog` - (Optional, String) Name of the catalog where this query will be executed. +* `schema` - (Optional, String) Name of the schema where this query will be executed. +* `description` - (Optional, String) General description that conveys additional information about this query such as usage notes. +* `run_as_mode` - (Optional, String) Sets the "Run as" role for the object. +* `tags` - (Optional, List of strings) Tags that will be added to the query. +* `parameter` - (Optional, Block) Query parameter definition. Consists of following attributes (one of `*_value` is required): + * `name` - (Required, String) Literal parameter marker that appears between double curly braces in the query text. + * `title` - (Optional, String) Text displayed in the user-facing parameter widget in the UI. + * `text_value` - (Block) Text parameter value. Consists of following attributes: + * `value` - (Required, String) - actual text value. + * `numeric_value` - (Block) Numeric parameter value. Consists of following attributes: + * `value` - (Required, Double) - actual numeric value. + * `date_value` - (Block) Date query parameter value. Consists of following attributes (Can only specify one of `dynamic_date_value` or `date_value`): + * `date_value` - (String) Manually specified date-time value + * `dynamic_date_value` - (String) Dynamic date-time value based on current date-time. Possible values are `NOW`, `YESTERDAY`. + * `precision` - (Optional, String) Date-time precision to format the value into when the query is run. Possible values are `DAY_PRECISION`, `MINUTE_PRECISION`, `SECOND_PRECISION`. Defaults to `DAY_PRECISION` (`YYYY-MM-DD`). + * `date_range_value` - (Block) Date-range query parameter value. Consists of following attributes (Can only specify one of `dynamic_date_range_value` or `date_range_value`): + * `date_range_value` - (Block) Manually specified date-time range value. Consists of the following attributes: + * `start` (Required, String) - begin of the date range. + * `end` (Required, String) - end of the date range. + * `dynamic_date_range_value` - (String) Dynamic date-time range value based on current date-time. Possible values are `TODAY`, `YESTERDAY`, `THIS_WEEK`, `THIS_MONTH`, `THIS_YEAR`, `LAST_WEEK`, `LAST_MONTH`, `LAST_YEAR`, `LAST_HOUR`, `LAST_8_HOURS`, `LAST_24_HOURS`, `LAST_7_DAYS`, `LAST_14_DAYS`, `LAST_30_DAYS`, `LAST_60_DAYS`, `LAST_90_DAYS`, `LAST_12_MONTHS`. + * `start_day_of_week` - (Optional, Int) Specify what day that starts the week. + * `precision` - (Optional, String) Date-time precision to format the value into when the query is run. Possible values are `DAY_PRECISION`, `MINUTE_PRECISION`, `SECOND_PRECISION`. Defaults to `DAY_PRECISION` (`YYYY-MM-DD`). + * `enum_value` - (Block) Dropdown parameter value. Consists of following attributes: + * `enum_options` - (String) List of valid query parameter values, newline delimited. + * `values` - (Array of strings) List of selected query parameter values. 
+  * `multi_values_options` - (Optional, Block) If specified, allows multiple values to be selected for this parameter. Consists of following attributes:
+    * `prefix` - (Optional, String) Character that prefixes each selected parameter value.
+    * `separator` - (Optional, String) Character that separates each selected parameter value. Defaults to a comma.
+    * `suffix` - (Optional, String) Character that suffixes each selected parameter value.
+  * `query_backed_value` - (Block) Query-based dropdown parameter value. Consists of following attributes:
+    * `query_id` - (Required, String) ID of the query that provides the parameter values.
+    * `values` - (Array of strings) List of selected query parameter values.
+    * `multi_values_options` - (Optional, Block) If specified, allows multiple values to be selected for this parameter. Consists of following attributes:
+      * `prefix` - (Optional, String) Character that prefixes each selected parameter value.
+      * `separator` - (Optional, String) Character that separates each selected parameter value. Defaults to a comma.
+      * `suffix` - (Optional, String) Character that suffixes each selected parameter value.
+
+## Attribute Reference
+
+In addition to all the arguments above, the following attributes are exported:
+
+* `id` - unique ID of the created Query.
+* `lifecycle_state` - The workspace state of the query. Used for tracking trashed status. (Possible values are `ACTIVE` or `TRASHED`).
+* `last_modifier_user_name` - Username of the user who last saved changes to this query.
+* `create_time` - The timestamp string indicating when the query was created.
+* `update_time` - The timestamp string indicating when the query was updated.
+
+## Migrating from `databricks_sql_query` resource
+
+Under the hood, the new resource uses the same data as the `databricks_sql_query`, but is exposed via a different API. This means that we can migrate existing queries without recreating them. This operation is done in a few steps:
+
+* Record the ID of the existing `databricks_sql_query`, for example, by executing the `terraform state show databricks_sql_query.query` command.
+* Create the code for the new implementation by performing the following changes:
+  * the `name` attribute is now named `display_name`
+  * the `parent` attribute (if present) is renamed to `parent_path`, and its value should be converted from `folders/object_id` to the actual workspace path.
+  * Blocks that specify values in the `parameter` block were renamed (see above).
+
+For example, if we have the original `databricks_sql_query` defined as:
+
+```hcl
+resource "databricks_sql_query" "query" {
+  data_source_id = databricks_sql_endpoint.example.data_source_id
+  query          = "select 42 as value"
+  name           = "My Query"
+  parent         = "folders/${databricks_directory.shared_dir.object_id}"
+
+  parameter {
+    name  = "p1"
+    title = "Title for p1"
+    text {
+      value = "default"
+    }
+  }
+}
+```
+
+we'll have a new resource defined as:
+
+```hcl
+resource "databricks_query" "query" {
+  warehouse_id = databricks_sql_endpoint.example.id
+  query_text   = "select 42 as value"
+  display_name = "My Query"
+  parent_path  = databricks_directory.shared_dir.path
+
+  parameter {
+    name  = "p1"
+    title = "Title for p1"
+    text_value {
+      value = "default"
+    }
+  }
+}
+```
+
+### For Terraform version >= 1.7.0
+
+Terraform 1.7 introduced the [removed](https://developer.hashicorp.com/terraform/language/resources/syntax#removing-resources) block in addition to the [import](https://developer.hashicorp.com/terraform/language/import) block introduced in Terraform 1.5. Together they make import and removal of resources easier, avoiding manual execution of the `terraform import` and `terraform state rm` commands.
+
+So with Terraform 1.7+, the migration looks like the following:
+
+* Remove the old query definition and replace it with the new one.
+* Adjust references, such as in `databricks_permissions`.
+* Add `import` and `removed` blocks like this:
+
+```hcl
+import {
+  to = databricks_query.query
+  id = ""
+}
+
+removed {
+  from = databricks_sql_query.query
+
+  lifecycle {
+    destroy = false
+  }
+}
+```
+
+* Run the `terraform plan` command to check for possible changes, such as value type changes.
+* Run the `terraform apply` command to apply the changes.
+* Remove the `import` and `removed` blocks from the code.
+
+### For Terraform version < 1.7.0
+
+* Remove the old query definition and replace it with the new one.
+* Remove the old resource from the state with the `terraform state rm databricks_sql_query.query` command.
+* Import the new resource with the `terraform import databricks_query.query ` command.
+* Adjust references, such as in `databricks_permissions`.
+* Run the `terraform plan` command to check for possible changes, such as value type changes.
+
+## Access Control
+
+[databricks_permissions](permissions.md#sql-query-usage) can control which groups or individual users can *Manage*, *Edit*, *Run* or *View* individual queries.
+
+```hcl
+resource "databricks_permissions" "query_usage" {
+  sql_query_id = databricks_query.query.id
+  access_control {
+    group_name       = "users"
+    permission_level = "CAN_RUN"
+  }
+}
+```
+
+## Import
+
+This resource can be imported using query ID:
+
+```bash
+terraform import databricks_query.this 
+```
+
+## Related Resources
+
+The following resources are often used in the same context:
+
+* [databricks_alert](alert.md) to manage [Databricks SQL Alerts](https://docs.databricks.com/en/sql/user/alerts/index.html).
+* [databricks_sql_endpoint](sql_endpoint.md) to manage [Databricks SQL Endpoints](https://docs.databricks.com/sql/admin/sql-endpoints.html).
+* [databricks_directory](directory.md) to manage directories in [Databricks Workspace](https://docs.databricks.com/workspace/workspace-objects.html).
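+
+As a worked example of the `parameter` blocks documented above, here is a sketch combining a text parameter and a dynamic date parameter (names and values are illustrative):
+
+```hcl
+resource "databricks_query" "parameterized" {
+  warehouse_id = databricks_sql_endpoint.example.id
+  display_name = "Orders by region"
+  query_text   = "SELECT count(*) FROM orders WHERE region = {{region}} AND order_date < {{cutoff}}"
+
+  # Text parameter with a default value.
+  parameter {
+    name  = "region"
+    title = "Region"
+    text_value {
+      value = "EMEA"
+    }
+  }
+
+  # Date parameter that defaults to the current date-time, truncated to day precision.
+  parameter {
+    name = "cutoff"
+    date_value {
+      dynamic_date_value = "NOW"
+      precision          = "DAY_PRECISION"
+    }
+  }
+}
+```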
diff --git a/internal/acceptance/alert_test.go b/internal/acceptance/alert_test.go index 22ed542468..b7ffe2e4c5 100644 --- a/internal/acceptance/alert_test.go +++ b/internal/acceptance/alert_test.go @@ -7,14 +7,14 @@ import ( func TestAccAlert(t *testing.T) { WorkspaceLevel(t, Step{ Template: ` - resource "databricks_sql_query" "this" { - data_source_id = "{env.TEST_DEFAULT_WAREHOUSE_DATASOURCE_ID}" - name = "tf-{var.RANDOM}" - query = "SELECT 1 AS p1, 2 as p2" + resource "databricks_query" "this" { + warehouse_id = "{env.TEST_DEFAULT_WAREHOUSE_ID}" + display_name = "tf-{var.RANDOM}" + query_text = "SELECT 1 AS p1, 2 as p2" } resource "databricks_alert" "alert" { - query_id = databricks_sql_query.this.id + query_id = databricks_query.this.id display_name = "tf-alert-{var.RANDOM}" condition { op = "EQUAL" @@ -33,14 +33,14 @@ func TestAccAlert(t *testing.T) { `, }, Step{ Template: ` - resource "databricks_sql_query" "this" { - data_source_id = "{env.TEST_DEFAULT_WAREHOUSE_DATASOURCE_ID}" - name = "tf-{var.RANDOM}" - query = "SELECT 1 AS p1, 2 as p2" + resource "databricks_query" "this" { + warehouse_id = "{env.TEST_DEFAULT_WAREHOUSE_ID}" + display_name = "tf-{var.RANDOM}" + query_text = "SELECT 1 AS p1, 2 as p2" } resource "databricks_alert" "alert" { - query_id = databricks_sql_query.this.id + query_id = databricks_query.this.id display_name = "tf-alert-{var.RANDOM}" condition { op = "GREATER_THAN" diff --git a/internal/acceptance/permissions_test.go b/internal/acceptance/permissions_test.go index 7c5da72512..2033a100ad 100644 --- a/internal/acceptance/permissions_test.go +++ b/internal/acceptance/permissions_test.go @@ -841,20 +841,20 @@ func TestAccPermissions_ServingEndpoint(t *testing.T) { func TestAccPermissions_Alert(t *testing.T) { loadDebugEnvIfRunsFromIDE(t, "workspace") alertTemplate := ` - resource "databricks_sql_query" "this" { - name = "{var.STICKY_RANDOM}-query" - query = "SELECT 1 AS p1, 2 as p2" - data_source_id = "{env.TEST_DEFAULT_WAREHOUSE_DATASOURCE_ID}" + resource "databricks_query" "this" { + display_name = "{var.STICKY_RANDOM}-query" + query_text = "SELECT 1 AS p1, 2 as p2" + warehouse_id = "{env.TEST_DEFAULT_WAREHOUSE_ID}" } resource "databricks_alert" "this" { - query_id = databricks_sql_query.this.id + query_id = databricks_query.this.id display_name = "{var.STICKY_RANDOM}-alert" condition { op = "GREATER_THAN" operand { column { - name = "value" + name = "p1" } } threshold { @@ -876,3 +876,23 @@ func TestAccPermissions_Alert(t *testing.T) { ExpectError: regexp.MustCompile("cannot remove management permissions for the current user for alert, allowed levels: CAN_MANAGE"), }) } + +func TestAccPermissions_Query(t *testing.T) { + loadDebugEnvIfRunsFromIDE(t, "workspace") + queryTemplate := ` + resource "databricks_query" "this" { + display_name = "{var.STICKY_RANDOM}-query" + query_text = "SELECT 1 AS p1, 2 as p2" + warehouse_id = "{env.TEST_DEFAULT_WAREHOUSE_ID}" + }` + WorkspaceLevel(t, Step{ + Template: queryTemplate + makePermissionsTestStage("sql_query_id", "databricks_query.this.id", groupPermissions("CAN_VIEW")), + }, Step{ + Template: queryTemplate + makePermissionsTestStage("sql_query_id", "databricks_query.this.id", + currentPrincipalPermission(t, "CAN_MANAGE"), groupPermissions("CAN_VIEW", "CAN_EDIT", "CAN_RUN", "CAN_MANAGE")), + }, Step{ + Template: queryTemplate + makePermissionsTestStage("sql_query_id", "databricks_query.this.id", + currentPrincipalPermission(t, "CAN_VIEW"), groupPermissions("CAN_VIEW", "CAN_EDIT", "CAN_RUN", "CAN_MANAGE")), + 
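+		// Downgrading the current principal to CAN_VIEW must fail: as with the
+		// alert test above, the caller has to keep CAN_MANAGE on the query.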
ExpectError: regexp.MustCompile("cannot remove management permissions for the current user for query, allowed levels: CAN_MANAGE"), + }) +} diff --git a/internal/acceptance/query_test.go b/internal/acceptance/query_test.go new file mode 100644 index 0000000000..72230d3373 --- /dev/null +++ b/internal/acceptance/query_test.go @@ -0,0 +1,32 @@ +package acceptance + +import ( + "testing" +) + +func TestAccQuery(t *testing.T) { + WorkspaceLevel(t, Step{ + Template: ` + resource "databricks_query" "this" { + warehouse_id = "{env.TEST_DEFAULT_WAREHOUSE_ID}" + display_name = "tf-{var.RANDOM}" + query_text = "SELECT 1 AS p1, 2 as p2" + } +`, + }, Step{ + Template: ` + resource "databricks_query" "this" { + warehouse_id = "{env.TEST_DEFAULT_WAREHOUSE_ID}" + display_name = "tf-{var.RANDOM}" + query_text = "SELECT 1 AS p1, 2 as p2" + parameter { + name = "foo" + text_value { + value = "bar" + } + title = "foo" + } + } +`, + }) +} diff --git a/internal/acceptance/sql_query_test.go b/internal/acceptance/sql_query_test.go index bc49c9ee6f..156374db1c 100644 --- a/internal/acceptance/sql_query_test.go +++ b/internal/acceptance/sql_query_test.go @@ -4,7 +4,7 @@ import ( "testing" ) -func TestAccQuery(t *testing.T) { +func TestAccSqlQuery(t *testing.T) { WorkspaceLevel(t, Step{ Template: ` resource "databricks_sql_query" "q1" { diff --git a/internal/providers/sdkv2/sdkv2.go b/internal/providers/sdkv2/sdkv2.go index 8136901ddf..d40d663ee2 100644 --- a/internal/providers/sdkv2/sdkv2.go +++ b/internal/providers/sdkv2/sdkv2.go @@ -191,6 +191,7 @@ func DatabricksProvider() *schema.Provider { "databricks_pipeline": pipelines.ResourcePipeline().ToResource(), "databricks_provider": sharing.ResourceProvider().ToResource(), "databricks_quality_monitor": catalog.ResourceQualityMonitor().ToResource(), + "databricks_query": sql.ResourceQuery().ToResource(), "databricks_recipient": sharing.ResourceRecipient().ToResource(), "databricks_registered_model": catalog.ResourceRegisteredModel().ToResource(), "databricks_repo": repos.ResourceRepo().ToResource(), diff --git a/pipelines/resource_pipeline.go b/pipelines/resource_pipeline.go index d187e43336..ac18eef8ff 100644 --- a/pipelines/resource_pipeline.go +++ b/pipelines/resource_pipeline.go @@ -246,6 +246,8 @@ func (Pipeline) CustomizeSchema(s *common.CustomizableSchema) *common.Customizab s.SchemaPath("storage").SetConflictsWith([]string{"catalog"}) s.SchemaPath("catalog").SetConflictsWith([]string{"storage"}) s.SchemaPath("ingestion_definition", "connection_name").SetConflictsWith([]string{"ingestion_definition.0.ingestion_gateway_id"}) + s.SchemaPath("target").SetConflictsWith([]string{"schema"}) + s.SchemaPath("schema").SetConflictsWith([]string{"target"}) // MinItems fields s.SchemaPath("library").SetMinItems(1) diff --git a/sql/resource_alert.go b/sql/resource_alert.go index 03281d5006..16022548a4 100644 --- a/sql/resource_alert.go +++ b/sql/resource_alert.go @@ -19,7 +19,6 @@ func ResourceAlert() common.Resource { // TODO: can we automatically generate it from SDK? Or should we avoid validation at all? 
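+	// The operator list below is maintained by hand for now; the trailing `true`
+	// makes the StringInSlice validation case-insensitive.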
common.CustomizeSchemaPath(m, "condition", "op").SetRequired().SetValidateFunc(validation.StringInSlice([]string{ "GREATER_THAN", "GREATER_THAN_OR_EQUAL", "LESS_THAN", "LESS_THAN_OR_EQUAL", "EQUAL", "NOT_EQUAL", "IS_NULL"}, true)) - common.CustomizeSchemaPath(m, "condition", "op").SetRequired() common.CustomizeSchemaPath(m, "parent_path").SetCustomSuppressDiff(common.WorkspaceOrEmptyPathPrefixDiffSuppress).SetForceNew() common.CustomizeSchemaPath(m, "condition", "operand").SetRequired() common.CustomizeSchemaPath(m, "condition", "operand", "column").SetRequired() @@ -39,6 +38,7 @@ func ResourceAlert() common.Resource { strings.TrimPrefix(f, "condition.0.threshold.0.value.0.")).SetExactlyOneOf(alof) } common.CustomizeSchemaPath(m, "owner_user_name").SetSuppressDiff() + common.CustomizeSchemaPath(m, "notify_on_ok").SetDefault(true) common.CustomizeSchemaPath(m, "id").SetReadOnly() common.CustomizeSchemaPath(m, "create_time").SetReadOnly() common.CustomizeSchemaPath(m, "lifecycle_state").SetReadOnly() diff --git a/sql/resource_alert_test.go b/sql/resource_alert_test.go index f0559434b3..31ef7ed05d 100644 --- a/sql/resource_alert_test.go +++ b/sql/resource_alert_test.go @@ -5,6 +5,7 @@ import ( "testing" "github.com/databricks/databricks-sdk-go/apierr" + "github.com/databricks/databricks-sdk-go/experimental/mocks" "github.com/databricks/databricks-sdk-go/service/sql" "github.com/databricks/terraform-provider-databricks/qa" @@ -31,6 +32,7 @@ var ( }, }, ParentPath: "/Workspace/Shared/Alerts", + NotifyOnOk: true, } createHcl = `query_id = "123456" display_name = "TF new alert" @@ -54,6 +56,7 @@ var ( QueryId: "123456", DisplayName: "TF new alert", ParentPath: "/Shared/Alerts", + NotifyOnOk: true, Condition: &sql.AlertCondition{ Op: "GREATER_THAN", Operand: &sql.AlertConditionOperand{ @@ -194,6 +197,7 @@ func TestAlertUpdate(t *testing.T) { QueryId: "123456", DisplayName: "TF new alert", OwnerUserName: "user@domain.com", + NotifyOnOk: false, Condition: &sql.AlertCondition{ Op: "GREATER_THAN", Operand: &sql.AlertConditionOperand{ @@ -223,6 +227,7 @@ func TestAlertUpdate(t *testing.T) { HCL: `query_id = "123456" display_name = "TF new alert" owner_user_name = "user@domain.com" + notify_on_ok = false condition { op = "GREATER_THAN" operand { diff --git a/sql/resource_query.go b/sql/resource_query.go new file mode 100644 index 0000000000..80a69a385c --- /dev/null +++ b/sql/resource_query.go @@ -0,0 +1,169 @@ +package sql + +import ( + "context" + "log" + "strings" + + "github.com/databricks/databricks-sdk-go/service/sql" + "github.com/databricks/terraform-provider-databricks/common" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation" +) + +// Need a struct for Query because there are aliases we need and it'll be needed in the create method. 
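+// Embedding the SDK's sql.Query lets common.StructToSchema derive the resource
+// schema, while the alias map below renames the SDK's plural `parameters` field
+// to the singular `parameter` block used in HCL.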
+type queryStruct struct { + sql.Query +} + +var queryAliasMap = map[string]string{ + "parameters": "parameter", +} + +func (queryStruct) Aliases() map[string]map[string]string { + return map[string]map[string]string{ + "sql.queryStruct": queryAliasMap, + } +} + +func (queryStruct) CustomizeSchema(m *common.CustomizableSchema) *common.CustomizableSchema { + m.SchemaPath("display_name").SetRequired().SetValidateFunc(validation.StringIsNotWhiteSpace) + m.SchemaPath("query_text").SetRequired() + m.SchemaPath("warehouse_id").SetRequired().SetValidateFunc(validation.StringIsNotWhiteSpace) + m.SchemaPath("parent_path").SetCustomSuppressDiff(common.WorkspaceOrEmptyPathPrefixDiffSuppress).SetForceNew() + m.SchemaPath("owner_user_name").SetSuppressDiff() + m.SchemaPath("run_as_mode").SetSuppressDiff() + //m.SchemaPath("").SetSuppressDiff() + //m.SchemaPath("").SetSuppressDiff() + m.SchemaPath("id").SetReadOnly() + m.SchemaPath("create_time").SetReadOnly() + m.SchemaPath("lifecycle_state").SetReadOnly() + m.SchemaPath("last_modifier_user_name").SetReadOnly() + m.SchemaPath("update_time").SetReadOnly() + + // customize parameters + m.SchemaPath("parameter", "name").SetRequired().SetValidateFunc(validation.StringIsNotWhiteSpace) + m.SchemaPath("parameter", "date_range_value", "precision").SetSuppressDiff() + m.SchemaPath("parameter", "date_value", "precision").SetSuppressDiff() + m.SchemaPath("parameter", "query_backed_value", "query_id").SetRequired() + m.SchemaPath("parameter", "text_value", "value").SetRequired() + m.SchemaPath("parameter", "numeric_value", "value").SetRequired() + // TODO: fix setting of AtLeastOneOf + // valuesAlof := []string{ + // "parameter.0.date_range_value", + // "parameter.0.date_value", + // "parameter.0.query_backed_value", + // "parameter.0.text_value", + // "parameter.0.numeric_value", + // "parameter.0.enum_value", + // } + // for _, f := range valuesAlof { + // m.SchemaPath("parameter", strings.TrimPrefix(f, "parameter.0.")).SetAtLeastOneOf(valuesAlof) + // } + return m +} + +type queryCreateStruct struct { + sql.CreateQueryRequestQuery +} + +func (queryCreateStruct) Aliases() map[string]map[string]string { + return map[string]map[string]string{ + "sql.queryCreateStruct": queryAliasMap, + } +} + +func (queryCreateStruct) CustomizeSchema(s *common.CustomizableSchema) *common.CustomizableSchema { + return s +} + +type queryUpdateStruct struct { + sql.UpdateQueryRequestQuery +} + +func (queryUpdateStruct) Aliases() map[string]map[string]string { + return map[string]map[string]string{ + "sql.queryUpdateStruct": queryAliasMap, + } +} + +func (queryUpdateStruct) CustomizeSchema(s *common.CustomizableSchema) *common.CustomizableSchema { + return s +} + +func ResourceQuery() common.Resource { + s := common.StructToSchema(queryStruct{}, nil) + return common.Resource{ + Create: func(ctx context.Context, d *schema.ResourceData, c *common.DatabricksClient) error { + w, err := c.WorkspaceClient() + if err != nil { + return err + } + var q queryCreateStruct + common.DataToStructPointer(d, s, &q) + apiQuery, err := w.Queries.Create(ctx, sql.CreateQueryRequest{ + Query: &q.CreateQueryRequestQuery, + }) + if err != nil { + return err + } + d.SetId(apiQuery.Id) + owner := d.Get("owner_user_name").(string) + if owner != "" { + _, err = w.Queries.Update(ctx, sql.UpdateQueryRequest{ + Query: &sql.UpdateQueryRequestQuery{ + OwnerUserName: owner, + }, + Id: apiQuery.Id, + UpdateMask: "owner_user_name", + }) + } + return err + }, + Read: func(ctx context.Context, d *schema.ResourceData, c 
*common.DatabricksClient) error { + w, err := c.WorkspaceClient() + if err != nil { + return err + } + apiQuery, err := w.Queries.GetById(ctx, d.Id()) + if err != nil { + log.Printf("[WARN] error getting query by ID: %v", err) + return err + } + parentPath := d.Get("parent_path").(string) + if parentPath != "" && strings.HasPrefix(apiQuery.ParentPath, "/Workspace") && !strings.HasPrefix(parentPath, "/Workspace") { + apiQuery.ParentPath = strings.TrimPrefix(parentPath, "/Workspace") + } + return common.StructToData(queryStruct{Query: *apiQuery}, s, d) + }, + Update: func(ctx context.Context, d *schema.ResourceData, c *common.DatabricksClient) error { + w, err := c.WorkspaceClient() + if err != nil { + return err + } + var q queryUpdateStruct + common.DataToStructPointer(d, s, &q) + updateMask := "display_name,query_text,warehouse_id,parameters" + for _, f := range []string{"run_as_mode", "owner_user_name", "description", "tags", + "apply_auto_limit", "catalog", "schema"} { + if d.HasChange(f) { + updateMask += "," + f + } + } + _, err = w.Queries.Update(ctx, sql.UpdateQueryRequest{ + Query: &q.UpdateQueryRequestQuery, + Id: d.Id(), + UpdateMask: updateMask, + }) + return err + }, + Delete: func(ctx context.Context, d *schema.ResourceData, c *common.DatabricksClient) error { + w, err := c.WorkspaceClient() + if err != nil { + return err + } + return w.Queries.DeleteById(ctx, d.Id()) + }, + Schema: s, + } +} diff --git a/sql/resource_query_test.go b/sql/resource_query_test.go new file mode 100644 index 0000000000..ccc3c13608 --- /dev/null +++ b/sql/resource_query_test.go @@ -0,0 +1,153 @@ +package sql + +import ( + "net/http" + "testing" + + "github.com/databricks/databricks-sdk-go/apierr" + "github.com/databricks/databricks-sdk-go/experimental/mocks" + "github.com/databricks/databricks-sdk-go/service/sql" + "github.com/databricks/terraform-provider-databricks/qa" + "github.com/stretchr/testify/mock" +) + +var ( + queryResponse = sql.Query{ + Id: "7890", + WarehouseId: "123456", + DisplayName: "TF new query", + OwnerUserName: "user@domain.com", + ParentPath: "/Workspace/Shared/Querys", + QueryText: "select 42 as value", + } + createQueryHcl = `warehouse_id = "123456" + query_text = "select 42 as value" + display_name = "TF new query" + parent_path = "/Shared/Querys" + owner_user_name = "user@domain.com" +` + createQueryRequest = sql.CreateQueryRequest{ + Query: &sql.CreateQueryRequestQuery{ + WarehouseId: "123456", + QueryText: "select 42 as value", + DisplayName: "TF new query", + ParentPath: "/Shared/Querys", + }} +) + +func TestQueryCreate(t *testing.T) { + qa.ResourceFixture{ + MockWorkspaceClientFunc: func(w *mocks.MockWorkspaceClient) { + e := w.GetMockQueriesAPI().EXPECT() + e.Create(mock.Anything, createQueryRequest).Return(&queryResponse, nil) + e.Update(mock.Anything, sql.UpdateQueryRequest{ + Id: "7890", + UpdateMask: "owner_user_name", + Query: &sql.UpdateQueryRequestQuery{ + OwnerUserName: "user@domain.com", + }, + }).Return(&queryResponse, nil) + e.GetById(mock.Anything, "7890").Return(&queryResponse, nil) + }, + Resource: ResourceQuery(), + Create: true, + HCL: createQueryHcl, + }.ApplyAndExpectData(t, map[string]any{ + "id": "7890", + "warehouse_id": "123456", + "display_name": "TF new query", + "owner_user_name": "user@domain.com", + }) +} + +func TestQueryCreate_Error(t *testing.T) { + qa.ResourceFixture{ + MockWorkspaceClientFunc: func(w *mocks.MockWorkspaceClient) { + e := w.GetMockQueriesAPI().EXPECT() + e.Create(mock.Anything, createQueryRequest).Return(nil, 
&apierr.APIError{ + StatusCode: http.StatusBadRequest, + Message: "bad payload", + }) + }, + Resource: ResourceQuery(), + Create: true, + HCL: createQueryHcl, + }.ExpectError(t, "bad payload") +} + +func TestQueryRead_Import(t *testing.T) { + qa.ResourceFixture{ + MockWorkspaceClientFunc: func(w *mocks.MockWorkspaceClient) { + w.GetMockQueriesAPI().EXPECT().GetById(mock.Anything, "7890").Return(&queryResponse, nil) + }, + Resource: ResourceQuery(), + Read: true, + ID: "7890", + New: true, + }.ApplyAndExpectData(t, map[string]any{ + "id": "7890", + "warehouse_id": "123456", + "query_text": "select 42 as value", + "display_name": "TF new query", + "owner_user_name": "user@domain.com", + }) +} + +func TestQueryRead_Error(t *testing.T) { + qa.ResourceFixture{ + MockWorkspaceClientFunc: func(w *mocks.MockWorkspaceClient) { + w.GetMockQueriesAPI().EXPECT().GetById(mock.Anything, "7890").Return(nil, &apierr.APIError{ + StatusCode: http.StatusBadRequest, + Message: "bad payload", + }) + }, + Resource: ResourceQuery(), + Read: true, + ID: "7890", + New: true, + }.ExpectError(t, "bad payload") +} + +func TestQueryDelete(t *testing.T) { + qa.ResourceFixture{ + MockWorkspaceClientFunc: func(w *mocks.MockWorkspaceClient) { + w.GetMockQueriesAPI().EXPECT().DeleteById(mock.Anything, "7890").Return(nil) + }, + Resource: ResourceQuery(), + Delete: true, + ID: "7890", + New: true, + }.ApplyNoError(t) +} + +func TestQueryUpdate(t *testing.T) { + qa.ResourceFixture{ + MockWorkspaceClientFunc: func(w *mocks.MockWorkspaceClient) { + e := w.GetMockQueriesAPI().EXPECT() + e.Update(mock.Anything, sql.UpdateQueryRequest{ + Id: "7890", + UpdateMask: "display_name,query_text,warehouse_id,parameters,owner_user_name", + Query: &sql.UpdateQueryRequestQuery{ + WarehouseId: "123456", + DisplayName: "TF new query", + OwnerUserName: "user@domain.com", + QueryText: "select 42 as value", + }}).Return(&queryResponse, nil) + e.GetById(mock.Anything, "7890").Return(&queryResponse, nil) + }, + Resource: ResourceQuery(), + Update: true, + ID: "7890", + HCL: `warehouse_id = "123456" + query_text = "select 42 as value" + display_name = "TF new query" + owner_user_name = "user@domain.com" +`, + }.ApplyAndExpectData(t, map[string]any{ + "id": "7890", + "warehouse_id": "123456", + "query_text": "select 42 as value", + "display_name": "TF new query", + "owner_user_name": "user@domain.com", + }) +} diff --git a/sql/resource_sql_query.go b/sql/resource_sql_query.go index 9359098df9..07b438aa31 100644 --- a/sql/resource_sql_query.go +++ b/sql/resource_sql_query.go @@ -587,6 +587,7 @@ func ResourceSqlQuery() common.Resource { Delete: func(ctx context.Context, data *schema.ResourceData, c *common.DatabricksClient) error { return NewQueryAPI(ctx, c).Delete(data.Id()) }, - Schema: s, + Schema: s, + DeprecationMessage: "This resource is deprecated and will be removed in the future. 
Please use the `databricks_query` resource instead.", } } diff --git a/sql/resource_sql_query_test.go b/sql/resource_sql_query_test.go index 227373fcbb..6417731003 100644 --- a/sql/resource_sql_query_test.go +++ b/sql/resource_sql_query_test.go @@ -9,7 +9,7 @@ import ( "github.com/stretchr/testify/assert" ) -func TestQueryCreate(t *testing.T) { +func TestSqlQueryCreate(t *testing.T) { d, err := qa.ResourceFixture{ Fixtures: []qa.HTTPFixture{ { @@ -65,7 +65,7 @@ func TestQueryCreate(t *testing.T) { assert.Equal(t, "viewer", d.Get("run_as_role")) } -func TestQueryCreateWithMultipleSchedules(t *testing.T) { +func TestSqlQueryCreateWithMultipleSchedules(t *testing.T) { qa.ResourceFixture{ Resource: ResourceSqlQuery(), Create: true, @@ -84,10 +84,10 @@ func TestQueryCreateWithMultipleSchedules(t *testing.T) { } } `, - }.ExpectError(t, "invalid config supplied. [schedule.#.continuous] Conflicting configuration arguments. [schedule.#.daily] Conflicting configuration arguments. [schedule] Argument is deprecated") + }.ExpectError(t, "invalid config supplied. [schedule.#.continuous] Conflicting configuration arguments. [schedule.#.daily] Conflicting configuration arguments. [schedule] Argument is deprecated. Deprecated Resource") } -func TestQueryCreateWithContinuousSchedule(t *testing.T) { +func TestSqlQueryCreateWithContinuousSchedule(t *testing.T) { intervalSeconds := 3600 untilDate := "2021-04-21" @@ -149,7 +149,7 @@ func TestQueryCreateWithContinuousSchedule(t *testing.T) { assert.Equal(t, untilDate, d.Get("schedule.0.continuous.0.until_date")) } -func TestQueryCreateWithDailySchedule(t *testing.T) { +func TestSqlQueryCreateWithDailySchedule(t *testing.T) { intervalDays := 2 intervalSeconds := intervalDays * 24 * 60 * 60 timeOfDay := "06:00" @@ -215,7 +215,7 @@ func TestQueryCreateWithDailySchedule(t *testing.T) { assert.Equal(t, untilDate, d.Get("schedule.0.daily.0.until_date")) } -func TestQueryCreateWithWeeklySchedule(t *testing.T) { +func TestSqlQueryCreateWithWeeklySchedule(t *testing.T) { intervalWeeks := 2 intervalSeconds := intervalWeeks * 7 * 24 * 60 * 60 timeOfDay := "06:00" @@ -284,7 +284,7 @@ func TestQueryCreateWithWeeklySchedule(t *testing.T) { assert.Equal(t, untilDate, d.Get("schedule.0.weekly.0.until_date")) } -func TestQueryCreateDeletesDefaultVisualization(t *testing.T) { +func TestSqlQueryCreateDeletesDefaultVisualization(t *testing.T) { _, err := qa.ResourceFixture{ Fixtures: []qa.HTTPFixture{ { @@ -338,7 +338,7 @@ func TestQueryCreateDeletesDefaultVisualization(t *testing.T) { assert.NoError(t, err) } -func TestQueryRead(t *testing.T) { +func TestSqlQueryRead(t *testing.T) { d, err := qa.ResourceFixture{ Fixtures: []qa.HTTPFixture{ { @@ -363,7 +363,7 @@ func TestQueryRead(t *testing.T) { assert.Equal(t, "foo", d.Id()) } -func TestQueryReadWithSchedule(t *testing.T) { +func TestSqlQueryReadWithSchedule(t *testing.T) { // Note: this tests that if a schedule is returned by the API, // it will always show up in the resulting resource data. 
// If it doesn't, we wouldn't be able to erase a schedule @@ -390,7 +390,7 @@ func TestQueryReadWithSchedule(t *testing.T) { assert.Equal(t, 12345, d.Get("schedule.0.continuous.0.interval_seconds")) } -func TestQueryUpdate(t *testing.T) { +func TestSqlQueryUpdate(t *testing.T) { d, err := qa.ResourceFixture{ Fixtures: []qa.HTTPFixture{ { @@ -436,7 +436,7 @@ func TestQueryUpdate(t *testing.T) { assert.Equal(t, "SELECT 2", d.Get("query")) } -func TestQueryUpdateWithParams(t *testing.T) { +func TestSqlQueryUpdateWithParams(t *testing.T) { body := api.Query{ ID: "foo", DataSourceID: "xyz", @@ -679,7 +679,7 @@ func TestQueryUpdateWithParams(t *testing.T) { assert.Len(t, d.Get("parameter").([]any), 12) } -func TestQueryDelete(t *testing.T) { +func TestSqlQueryDelete(t *testing.T) { d, err := qa.ResourceFixture{ Fixtures: []qa.HTTPFixture{ {