Merge remote-tracking branch 'origin/master' into resource-settings
hectorcast-db committed Oct 19, 2023
2 parents 5fdd326 + 8a3089c commit ec9295f
Showing 86 changed files with 2,987 additions and 642 deletions.
145 changes: 145 additions & 0 deletions .github/workflows/schema.yml
@@ -0,0 +1,145 @@
name: Provider schema

on:
  pull_request:
    types: [opened, synchronize]

  workflow_dispatch:
    inputs:
      base:
        description: 'Base ref'
        default: 'master'
        required: true
      head:
        description: 'Head ref'
        default: 'master'
        required: true

jobs:
  compute_current:
    name: "Generate current"
    runs-on: ubuntu-latest

    steps:
      - if: github.event_name == 'pull_request'
        name: Checkout
        uses: actions/checkout@v4
        with:
          # Checkout main branch to generate schema for current release
          ref: master

      - if: github.event_name == 'workflow_dispatch'
        name: Checkout
        uses: actions/checkout@v4
        with:
          ref: ${{ github.event.inputs.base }}

      - name: 'Setup Go'
        uses: actions/setup-go@v4
        with:
          go-version: 1.21.x

      - name: 'Setup Terraform'
        uses: hashicorp/setup-terraform@v2
        with:
          terraform_wrapper: false

      - run: make install

      - name: Generate provider schema
        shell: bash
        run: |
          set -ex
          cd /tmp
          cat > main.tf <<EOF
          terraform {
            required_providers {
              databricks = {
                source = "databricks/databricks"
              }
            }
          }
          EOF
          terraform init
          terraform providers schema -json > provider.json

      - name: 'Upload provider schema'
        uses: actions/upload-artifact@v3
        with:
          name: schema-current
          path: /tmp/provider.json
          retention-days: 1

  compute_new:
    name: "Generate new"
    runs-on: ubuntu-latest

    steps:
      - if: github.event_name == 'pull_request'
        name: Checkout
        uses: actions/checkout@v4

      - if: github.event_name == 'workflow_dispatch'
        name: Checkout
        uses: actions/checkout@v4
        with:
          ref: ${{ github.event.inputs.head }}

      - name: 'Setup Go'
        uses: actions/setup-go@v4
        with:
          go-version: 1.21.x

      - name: 'Setup Terraform'
        uses: hashicorp/setup-terraform@v2
        with:
          terraform_wrapper: false

      - run: make install

      - name: Generate provider schema
        shell: bash
        run: |
          set -ex
          cd /tmp
          cat > main.tf <<EOF
          terraform {
            required_providers {
              databricks = {
                source = "databricks/databricks"
              }
            }
          }
          EOF
          terraform init
          terraform providers schema -json > provider.json

      - name: 'Upload provider schema'
        uses: actions/upload-artifact@v3
        with:
          name: schema-new
          path: /tmp/provider.json
          retention-days: 1

  diff:
    needs: [compute_current, compute_new]

    name: "Compute diff"
    runs-on: ubuntu-latest

    steps:
      - name: 'Setup Go'
        uses: actions/setup-go@v4
        with:
          go-version: 1.21.x
          cache: false

      - run: go install github.com/josephburnett/jd@latest

      - name: 'Download provider schemas'
        uses: actions/download-artifact@v3

      - run: ls -l schema*/*

      - run: jd -color schema-current/provider.json schema-new/provider.json
        continue-on-error: true
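
Both "Generate" jobs upload the provider.json file produced by `terraform providers schema -json`, and the `diff` job compares the two artifacts with `jd`. As a rough illustration of what those artifacts contain, here is a standalone Go sketch (a hypothetical helper, not part of this change; it assumes the standard `provider_schemas` / `resource_schemas` layout that Terraform emits) that summarizes one downloaded provider.json:

package main

import (
    "encoding/json"
    "fmt"
    "log"
    "os"
)

// providerSchemas models just the parts of `terraform providers schema -json`
// output needed to count resources and data sources per provider.
type providerSchemas struct {
    ProviderSchemas map[string]struct {
        ResourceSchemas   map[string]json.RawMessage `json:"resource_schemas"`
        DataSourceSchemas map[string]json.RawMessage `json:"data_source_schemas"`
    } `json:"provider_schemas"`
}

func main() {
    raw, err := os.ReadFile("provider.json") // e.g. the schema-current or schema-new artifact
    if err != nil {
        log.Fatal(err)
    }
    var ps providerSchemas
    if err := json.Unmarshal(raw, &ps); err != nil {
        log.Fatal(err)
    }
    for name, schema := range ps.ProviderSchemas {
        fmt.Printf("%s: %d resources, %d data sources\n",
            name, len(schema.ResourceSchemas), len(schema.DataSourceSchemas))
    }
}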
42 changes: 42 additions & 0 deletions CHANGELOG.md
@@ -1,5 +1,47 @@
# Version changelog

## 1.28.1
* Fixed read method for `databricks_storage_credential` resource ([#2804](https://github.com/databricks/terraform-provider-databricks/pull/2804)).


## 1.28.0
* Added `dashboard_filters_enabled` attribute to [databricks_sql_dashboard](https://registry.terraform.io/providers/databricks/databricks/latest/docs/resources/sql_dashboard) resource ([#2725](https://github.com/databricks/terraform-provider-databricks/pull/2725)).
* Added `empty_result_state` attribute to the [databricks_sql_alert](https://registry.terraform.io/providers/databricks/databricks/latest/docs/resources/sql_alert) resource ([#2724](https://github.com/databricks/terraform-provider-databricks/pull/2724)).
* Added `enabled` field for queueing ([#2741](https://github.com/databricks/terraform-provider-databricks/pull/2741)).
* Added [databricks_registered_model](https://registry.terraform.io/providers/databricks/databricks/latest/docs/resources/registered_model) resource ([#2771](https://github.com/databricks/terraform-provider-databricks/pull/2771)).
* Added logging package and fixed issue with API calls not being shown in DEBUG or lower log levels ([#2747](https://github.com/databricks/terraform-provider-databricks/pull/2747)).
* Added [databricks_system_schema](https://registry.terraform.io/providers/databricks/databricks/latest/docs/resources/system_schema) resource ([#2606](https://github.com/databricks/terraform-provider-databricks/pull/2606)).
* Don't rely on the presence of `@` to determine whether a principal is a user or a service principal ([#2765](https://github.com/databricks/terraform-provider-databricks/pull/2765)).
* Forced recreation of UC Volume when `volume_type` and `storage_location` are changed ([#2734](https://github.com/databricks/terraform-provider-databricks/pull/2734)).
* Improved Provider Logging ([#2801](https://github.com/databricks/terraform-provider-databricks/pull/2801)).
* Marked attributes in the `run_as` block in [databricks_job](https://registry.terraform.io/providers/databricks/databricks/latest/docs/resources/job) as `ExactlyOneOf` ([#2784](https://github.com/databricks/terraform-provider-databricks/pull/2784)).
* Masked sensitive field ([#2755](https://github.com/databricks/terraform-provider-databricks/pull/2755)).
* Removed deprecation warning from `cluster_mount_info` in [databricks_cluster](https://registry.terraform.io/providers/databricks/databricks/latest/docs/resources/cluster), but marked it as experimental ([#2787](https://github.com/databricks/terraform-provider-databricks/pull/2787)).
* Suppressed diff for `user_name` in [databricks_user](https://registry.terraform.io/providers/databricks/databricks/latest/docs/resources/user) when the change is only in character case ([#2786](https://github.com/databricks/terraform-provider-databricks/pull/2786)).
* Refresh grant lists ([#2746](https://github.com/databricks/terraform-provider-databricks/pull/2746)).
* Fixed `run_as_role` drift for the `databricks_sql_query` resource ([#2799](https://github.com/databricks/terraform-provider-databricks/pull/2799)).
* Fixed metastore read and added a test ([#2795](https://github.com/databricks/terraform-provider-databricks/pull/2795)).

Exporter:
* Exporter: fixed the logic for omitting some fields ([#2774](https://github.com/databricks/terraform-provider-databricks/pull/2774)).
* Exporter: improve exporting of [databricks_cluster_policy](https://registry.terraform.io/providers/databricks/databricks/latest/docs/resources/cluster_policy) resource ([#2680](https://github.com/databricks/terraform-provider-databricks/pull/2680)).
* Exporter: parallel export of resources ([#2742](https://github.com/databricks/terraform-provider-databricks/pull/2742)).

Documentation:
* Updated [databricks_grants](https://registry.terraform.io/providers/databricks/databricks/latest/docs/resources/grants) examples for [databricks_external_location](https://registry.terraform.io/providers/databricks/databricks/latest/docs/resources/external_location) ([#2735](https://github.com/databricks/terraform-provider-databricks/pull/2735)).
* Fixed documentation for [databricks_schema](https://registry.terraform.io/providers/databricks/databricks/latest/docs/resources/schema) about default value for `storage_root` ([#2790](https://github.com/databricks/terraform-provider-databricks/pull/2790)).
* Clarified possible values for `principal` attribute of [databricks_secret_acl](https://registry.terraform.io/providers/databricks/databricks/latest/docs/resources/secret_acl) ([#2772](https://github.com/databricks/terraform-provider-databricks/pull/2772)).

Other Changes:
* Bumped databricks-sdk-go dependency to 0.21.0 ([#2738](https://github.com/databricks/terraform-provider-databricks/pull/2738)).
* Bumped github.com/databricks/databricks-sdk-go from 0.21.0 to 0.22.0 ([#2761](https://github.com/databricks/terraform-provider-databricks/pull/2761)).
* Bumped github.com/databricks/databricks-sdk-go from 0.22.0 to 0.23.0 ([#2794](https://github.com/databricks/terraform-provider-databricks/pull/2794)).
* Bumped github.com/hashicorp/hcl/v2 from 2.18.0 to 2.18.1 ([#2776](https://github.com/databricks/terraform-provider-databricks/pull/2776)).
* Bumped github.com/zclconf/go-cty from 1.14.0 to 1.14.1 ([#2777](https://github.com/databricks/terraform-provider-databricks/pull/2777)).
* Used `terraform-field-dev` as code owner instead of `field-dev-ecosystem` ([#2718](https://github.com/databricks/terraform-provider-databricks/pull/2718)).
* GitHub Actions workflow to compute provider schema diff ([#2740](https://github.com/databricks/terraform-provider-databricks/pull/2740)).


## 1.27.0
* Fixed [databricks_permissions](https://registry.terraform.io/providers/databricks/databricks/latest/docs/resources/permissions) resource to correctly update permissions ([#2719](https://github.com/databricks/terraform-provider-databricks/pull/2719)).
* Added `owner` & `force_destroy` to [databricks_metastore_data_access](https://registry.terraform.io/providers/databricks/databricks/latest/docs/resources/metastore_data_access) ([#2713](https://github.com/databricks/terraform-provider-databricks/pull/2713)).
2 changes: 1 addition & 1 deletion CODEOWNERS
@@ -1 +1 @@
* @databricks/field-dev-ecosystem @databricks/eng-plat-auto-exp-reviewers
* @databricks/field-dev-terraform @databricks/eng-plat-auto-exp-reviewers
38 changes: 29 additions & 9 deletions catalog/resource_catalog.go
@@ -93,15 +93,17 @@ func ResourceCatalog() *schema.Resource {
            if err != nil {
                return err
            }
            _, err = w.WorkspaceBindings.Update(ctx, catalog.UpdateWorkspaceBindings{
                Name: ci.Name,
                AssignWorkspaces: []int64{currentMetastoreAssignment.WorkspaceId},
            _, err = w.WorkspaceBindings.UpdateBindings(ctx, catalog.UpdateWorkspaceBindingsParameters{
                SecurableName: ci.Name,
                SecurableType: "catalog",
                Add: []catalog.WorkspaceBinding{
                    {
                        BindingType: catalog.WorkspaceBindingBindingTypeBindingTypeReadWrite,
                        WorkspaceId: currentMetastoreAssignment.WorkspaceId,
                    },
                },
            })
            if err != nil {
                return err
            }

            return nil
            return err
        },
        Read: func(ctx context.Context, d *schema.ResourceData, c *common.DatabricksClient) error {
            w, err := c.WorkspaceClient()
@@ -131,7 +133,25 @@ func ResourceCatalog() *schema.Resource {
            // So if we don't update the field, then requests would be made to the old Name, which doesn't exist.
            d.SetId(ci.Name)

            return nil
            if d.Get("isolation_mode") != "ISOLATED" {
                return nil
            }
            // Bind the current workspace if the catalog is isolated, otherwise the read will fail
            currentMetastoreAssignment, err := w.Metastores.Current(ctx)
            if err != nil {
                return err
            }
            _, err = w.WorkspaceBindings.UpdateBindings(ctx, catalog.UpdateWorkspaceBindingsParameters{
                SecurableName: ci.Name,
                SecurableType: "catalog",
                Add: []catalog.WorkspaceBinding{
                    {
                        BindingType: catalog.WorkspaceBindingBindingTypeBindingTypeReadWrite,
                        WorkspaceId: currentMetastoreAssignment.WorkspaceId,
                    },
                },
            })
            return err
        },
        Delete: func(ctx context.Context, d *schema.ResourceData, c *common.DatabricksClient) error {
            w, err := c.WorkspaceClient()
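
Note on the resource_catalog.go hunks above: the provider now calls WorkspaceBindings.UpdateBindings from databricks-sdk-go (bumped to 0.23.0 in this release) instead of the older WorkspaceBindings.Update, both when creating an isolated catalog and when re-binding the current workspace during Read. A minimal standalone sketch of that call, assuming the SDK version vendored at this commit (field types may differ in later SDK releases) and a hypothetical catalog name, with error handling trimmed to log.Fatal:

package main

import (
    "context"
    "log"

    "github.com/databricks/databricks-sdk-go"
    "github.com/databricks/databricks-sdk-go/service/catalog"
)

func main() {
    ctx := context.Background()

    // Credentials are resolved from the environment (DATABRICKS_HOST, DATABRICKS_TOKEN, ...).
    w, err := databricks.NewWorkspaceClient()
    if err != nil {
        log.Fatal(err)
    }

    // Look up the current metastore assignment to get this workspace's ID...
    assignment, err := w.Metastores.Current(ctx)
    if err != nil {
        log.Fatal(err)
    }

    // ...and bind the workspace to the catalog with READ_WRITE access,
    // mirroring the call the provider makes for isolated catalogs.
    _, err = w.WorkspaceBindings.UpdateBindings(ctx, catalog.UpdateWorkspaceBindingsParameters{
        SecurableName: "my_isolated_catalog", // hypothetical catalog name
        SecurableType: "catalog",
        Add: []catalog.WorkspaceBinding{
            {
                BindingType: catalog.WorkspaceBindingBindingTypeBindingTypeReadWrite,
                WorkspaceId: assignment.WorkspaceId,
            },
        },
    })
    if err != nil {
        log.Fatal(err)
    }
}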
24 changes: 17 additions & 7 deletions catalog/resource_catalog_test.go
@@ -518,14 +518,24 @@ func TestCatalogCreateIsolated(t *testing.T) {
        },
        {
            Method: "PATCH",
            Resource: "/api/2.1/unity-catalog/workspace-bindings/catalogs/a",
            ExpectedRequest: catalog.UpdateWorkspaceBindings{
                Name: "a",
                AssignWorkspaces: []int64{123456789101112},
            Resource: "/api/2.1/unity-catalog/bindings/catalog/a",
            ExpectedRequest: catalog.UpdateWorkspaceBindingsParameters{
                SecurableName: "a",
                SecurableType: "catalog",
                Add: []catalog.WorkspaceBinding{
                    {
                        WorkspaceId: int64(123456789101112),
                        BindingType: catalog.WorkspaceBindingBindingTypeBindingTypeReadWrite,
                    },
                },
            },

            Response: catalog.CurrentWorkspaceBindings{
                Workspaces: []int64{123456789101112},
            Response: catalog.WorkspaceBindingsResponse{
                Bindings: []catalog.WorkspaceBinding{
                    {
                        WorkspaceId: int64(123456789101112),
                        BindingType: catalog.WorkspaceBindingBindingTypeBindingTypeReadWrite,
                    },
                },
            },
        },
        {
18 changes: 16 additions & 2 deletions catalog/resource_grants.go
@@ -105,6 +105,9 @@ func getPermissionEndpoint(securable, name string) string {
    if securable == "foreign_connection" {
        return fmt.Sprintf("/unity-catalog/permissions/connection/%s", name)
    }
    if securable == "model" {
        return fmt.Sprintf("/unity-catalog/permissions/function/%s", name)
    }
    return fmt.Sprintf("/unity-catalog/permissions/%s/%s", securable, name)
}

@@ -177,18 +180,21 @@ var mapping = securableMapping{

        // v1.0
        "ALL_PRIVILEGES": true,
        "APPLY_TAG": true,
        "BROWSE": true,
    },
    "view": {
        "SELECT": true,
        "BROWSE": true,
        "SELECT": true,
        "APPLY_TAG": true,
        "BROWSE": true,
    },
    "catalog": {
        "CREATE": true,
        "USAGE": true,

        // v1.0
        "ALL_PRIVILEGES": true,
        "APPLY_TAG": true,
        "USE_CATALOG": true,
        "USE_SCHEMA": true,
        "CREATE_SCHEMA": true,
@@ -211,6 +217,7 @@

        // v1.0
        "ALL_PRIVILEGES": true,
        "APPLY_TAG": true,
        "USE_SCHEMA": true,
        "CREATE_TABLE": true,
        "CREATE_FUNCTION": true,
@@ -250,12 +257,14 @@
    "metastore": {
        // v1.0
        "CREATE_CATALOG": true,
        "CREATE_CLEAN_ROOM": true,
        "CREATE_CONNECTION": true,
        "CREATE_EXTERNAL_LOCATION": true,
        "CREATE_STORAGE_CREDENTIAL": true,
        "CREATE_SHARE": true,
        "CREATE_RECIPIENT": true,
        "CREATE_PROVIDER": true,
        "MANAGE_ALLOWLIST": true,
        "USE_CONNECTION": true,
        "USE_PROVIDER": true,
        "USE_SHARE": true,
@@ -267,6 +276,11 @@
        "ALL_PRIVILEGES": true,
        "EXECUTE": true,
    },
    "model": {
        "ALL_PRIVILEGES": true,
        "APPLY_TAG": true,
        "EXECUTE": true,
    },
    "materialized_view": {
        "ALL_PRIVILEGES": true,
        "SELECT": true,
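
For the new `model` securable added above, grants are routed to the Unity Catalog function permissions endpoint by the getPermissionEndpoint change in the first hunk of this file. A minimal standalone sketch of that routing (hypothetical helper name, mirroring the logic shown above rather than calling the provider's internal function):

package main

import "fmt"

// permissionEndpoint sketches the routing from getPermissionEndpoint above:
// "model" grants are served by the function permissions endpoint, and
// everything else by the generic per-securable path.
func permissionEndpoint(securable, name string) string {
    switch securable {
    case "foreign_connection":
        return fmt.Sprintf("/unity-catalog/permissions/connection/%s", name)
    case "model":
        return fmt.Sprintf("/unity-catalog/permissions/function/%s", name)
    default:
        return fmt.Sprintf("/unity-catalog/permissions/%s/%s", securable, name)
    }
}

func main() {
    // A registered model named main.default.my_model (hypothetical) resolves
    // to the function permissions endpoint:
    fmt.Println(permissionEndpoint("model", "main.default.my_model"))
    // Output: /unity-catalog/permissions/function/main.default.my_model
}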
2 changes: 1 addition & 1 deletion catalog/resource_metastore.go
@@ -95,7 +95,7 @@ func ResourceMetastore() *schema.Resource {
            if err != nil {
                return err
            }
            return common.StructToData(mi, s, d)
            return common.StructToData(mi.MetastoreInfo, s, d)
        }, func(w *databricks.WorkspaceClient) error {
            mi, err := w.Metastores.GetById(ctx, d.Id())
            if err != nil {
(The remaining changed files in this commit are not shown here.)
