Skip to content

Commit

Permalink
Merge branch 'main' into feature/uc-sql-table-primary-and-foreign-keys-support
Browse files Browse the repository at this point in the history
  • Loading branch information
tanmay-db authored Nov 7, 2024
2 parents a689c02 + 5058e50 commit 11c26ef
Show file tree
Hide file tree
Showing 7 changed files with 149 additions and 15 deletions.
33 changes: 33 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,5 +1,38 @@
# Version changelog

## [Release] Release v1.57.0

### New Features and Improvements

* Added `databricks_functions` data source ([#4154](https://github.com/databricks/terraform-provider-databricks/pull/4154)).


### Bug Fixes

* Handle edge case for `effective_properties` in `databricks_sql_table` ([#4153](https://github.com/databricks/terraform-provider-databricks/pull/4153)).
* Provide more prescriptive error when users fail to create a single node cluster ([#4168](https://github.com/databricks/terraform-provider-databricks/pull/4168)).


### Internal Changes

* Add test instructions for external contributors ([#4169](https://github.com/databricks/terraform-provider-databricks/pull/4169)).
* Always write message for manual test integration ([#4188](https://github.com/databricks/terraform-provider-databricks/pull/4188)).
* Make `Read` after `Create`/`Update` configurable ([#4190](https://github.com/databricks/terraform-provider-databricks/pull/4190)).
* Migrate Share Data Source to Plugin Framework ([#4161](https://github.com/databricks/terraform-provider-databricks/pull/4161)).
* Migrate Share Resource to Plugin Framework ([#4047](https://github.com/databricks/terraform-provider-databricks/pull/4047)).
* Rollout Plugin Framework ([#4134](https://github.com/databricks/terraform-provider-databricks/pull/4134)).


### Dependency Updates

* Bump Go SDK to v0.50.0 ([#4178](https://github.com/databricks/terraform-provider-databricks/pull/4178)).


### Exporter

* Allow to match resource names by regular expression ([#4177](https://github.com/databricks/terraform-provider-databricks/pull/4177)).


## [Release] Release v1.56.0

### Bug Fixes
Expand Down
29 changes: 18 additions & 11 deletions common/resource.go
Original file line number Diff line number Diff line change
Expand Up @@ -16,17 +16,18 @@ import (

// Resource aims to simplify things like error & deleted entities handling
type Resource struct {
	Create         func(ctx context.Context, d *schema.ResourceData, c *DatabricksClient) error
	Read           func(ctx context.Context, d *schema.ResourceData, c *DatabricksClient) error
	Update         func(ctx context.Context, d *schema.ResourceData, c *DatabricksClient) error
	Delete         func(ctx context.Context, d *schema.ResourceData, c *DatabricksClient) error
	CustomizeDiff  func(ctx context.Context, d *schema.ResourceDiff) error
	StateUpgraders []schema.StateUpgrader
	Schema         map[string]*schema.Schema
	SchemaVersion  int
	Timeouts       *schema.ResourceTimeout

	DeprecationMessage string
	Importer           *schema.ResourceImporter

	// CanSkipReadAfterCreateAndUpdate, when set and returning true for the
	// given state, suppresses the implicit Read that normally follows a
	// successful Create or Update.
	CanSkipReadAfterCreateAndUpdate func(d *schema.ResourceData) bool
}

func nicerError(ctx context.Context, err error, action string) error {
Expand Down Expand Up @@ -94,6 +95,9 @@ func (r Resource) ToResource() *schema.Resource {
err = nicerError(ctx, err, "update")
return diag.FromErr(err)
}
if r.CanSkipReadAfterCreateAndUpdate != nil && r.CanSkipReadAfterCreateAndUpdate(d) {
return nil
}
if err := recoverable(r.Read)(ctx, d, c); err != nil {
err = nicerError(ctx, err, "read")
return diag.FromErr(err)
Expand Down Expand Up @@ -162,6 +166,9 @@ func (r Resource) ToResource() *schema.Resource {
err = nicerError(ctx, err, "create")
return diag.FromErr(err)
}
if r.CanSkipReadAfterCreateAndUpdate != nil && r.CanSkipReadAfterCreateAndUpdate(d) {
return nil
}
if err = recoverable(r.Read)(ctx, d, c); err != nil {
err = nicerError(ctx, err, "read")
return diag.FromErr(err)
Expand Down
89 changes: 89 additions & 0 deletions common/resource_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@ package common
import (
"context"
"fmt"
"log"
"testing"

"github.com/databricks/databricks-sdk-go/apierr"
Expand Down Expand Up @@ -38,6 +39,94 @@ func TestImportingCallsRead(t *testing.T) {
assert.Equal(t, 1, d.Get("foo"))
}

// createTestResourceForSkipRead builds a minimal Resource whose Create, Read
// and Update handlers each write a distinct value to the "foo" attribute
// (1, 2 and 3 respectively), so callers can tell from the final state which
// handlers actually ran. When skipRead is true, CanSkipReadAfterCreateAndUpdate
// is set to always return true, suppressing the implicit Read that follows
// Create and Update.
func createTestResourceForSkipRead(skipRead bool) Resource {
	res := Resource{
		Create: func(ctx context.Context,
			d *schema.ResourceData,
			c *DatabricksClient) error {
			log.Println("[DEBUG] Create called")
			return d.Set("foo", 1)
		},
		Read: func(ctx context.Context,
			d *schema.ResourceData,
			c *DatabricksClient) error {
			log.Println("[DEBUG] Read called")
			// Propagate the Set error instead of silently dropping it,
			// consistent with the Create and Update handlers.
			return d.Set("foo", 2)
		},
		Update: func(ctx context.Context,
			d *schema.ResourceData,
			c *DatabricksClient) error {
			log.Println("[DEBUG] Update called")
			return d.Set("foo", 3)
		},
		Schema: map[string]*schema.Schema{
			"foo": {
				Type:     schema.TypeInt,
				Required: true,
			},
		},
	}
	if skipRead {
		res.CanSkipReadAfterCreateAndUpdate = func(d *schema.ResourceData) bool {
			return true
		}
	}
	return res
}

func TestCreateSkipRead(t *testing.T) {
client := &DatabricksClient{}
ctx := context.Background()
r := createTestResourceForSkipRead(true).ToResource()
d := r.TestResourceData()
diags := r.CreateContext(ctx, d, client)
assert.False(t, diags.HasError())
assert.Equal(t, 1, d.Get("foo"))
}

func TestCreateDontSkipRead(t *testing.T) {
client := &DatabricksClient{}
ctx := context.Background()
r := createTestResourceForSkipRead(false).ToResource()
d := r.TestResourceData()
diags := r.CreateContext(ctx, d, client)
assert.False(t, diags.HasError())
assert.Equal(t, 2, d.Get("foo"))
}

// TestUpdateSkipRead imports the resource to obtain state, then verifies
// that the Read handler is skipped after Update: "foo" keeps the value
// written by Update (3) rather than the one Read would write (2).
func TestUpdateSkipRead(t *testing.T) {
	ctx := context.Background()
	client := &DatabricksClient{}
	res := createTestResourceForSkipRead(true).ToResource()
	data := res.TestResourceData()

	imported, err := res.Importer.StateContext(ctx, data, client)
	require.NoError(t, err)
	assert.Len(t, imported, 1)
	assert.False(t, res.Schema["foo"].ForceNew)
	assert.Equal(t, "", data.Id())

	diags := res.UpdateContext(ctx, data, client)
	assert.False(t, diags.HasError())
	assert.Equal(t, 3, data.Get("foo"))
}

// TestUpdateDontSkipRead imports the resource to obtain state, then verifies
// the default path: without a skip callback, the Read handler runs after
// Update and overwrites "foo" with its own value (2).
func TestUpdateDontSkipRead(t *testing.T) {
	ctx := context.Background()
	client := &DatabricksClient{}
	res := createTestResourceForSkipRead(false).ToResource()
	data := res.TestResourceData()

	imported, err := res.Importer.StateContext(ctx, data, client)
	require.NoError(t, err)
	assert.Len(t, imported, 1)
	assert.False(t, res.Schema["foo"].ForceNew)
	assert.Equal(t, "", data.Id())

	diags := res.UpdateContext(ctx, data, client)
	assert.False(t, diags.HasError())
	assert.Equal(t, 2, data.Get("foo"))
}

func TestHTTP404TriggersResourceRemovalForReadAndDelete(t *testing.T) {
nope := func(ctx context.Context,
d *schema.ResourceData,
Expand Down
2 changes: 1 addition & 1 deletion common/version.go
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ package common
import "context"

var (
version = "1.56.0"
version = "1.57.0"
// ResourceName is resource name without databricks_ prefix
ResourceName contextKey = 1
// Provider is the current instance of provider
Expand Down
2 changes: 1 addition & 1 deletion go.mod
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ go 1.22

require (
github.com/databricks/databricks-sdk-go v0.50.0
github.com/golang-jwt/jwt/v4 v4.5.0
github.com/golang-jwt/jwt/v4 v4.5.1
github.com/hashicorp/go-cty v1.4.1-0.20200414143053-d3edf31b6320
github.com/hashicorp/hcl v1.0.0
github.com/hashicorp/hcl/v2 v2.22.0
Expand Down
4 changes: 2 additions & 2 deletions go.sum
Original file line number Diff line number Diff line change
Expand Up @@ -55,8 +55,8 @@ github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag=
github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE=
github.com/go-test/deep v1.0.3 h1:ZrJSEWsXzPOxaZnFteGEfooLba+ju3FYIbOrS+rQd68=
github.com/go-test/deep v1.0.3/go.mod h1:wGDj63lr65AM2AQyKZd/NYHGb0R+1RLqB8NKt3aSFNA=
github.com/golang-jwt/jwt/v4 v4.5.0 h1:7cYmW1XlMY7h7ii7UhUyChSgS5wUJEnm9uZVTGqOWzg=
github.com/golang-jwt/jwt/v4 v4.5.0/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0=
github.com/golang-jwt/jwt/v4 v4.5.1 h1:JdqV9zKUdtaa9gdPlywC3aeoEsR681PlKC+4F5gQgeo=
github.com/golang-jwt/jwt/v4 v4.5.1/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0=
github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q=
github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da h1:oI5xCqsCo564l8iNU+DwB5epxmsaqB+rhGL0m5jtYqE=
Expand Down
5 changes: 5 additions & 0 deletions pipelines/resource_pipeline.go
Original file line number Diff line number Diff line change
Expand Up @@ -230,6 +230,11 @@ func (Pipeline) CustomizeSchema(s *common.CustomizableSchema) *common.Customizab
s.SchemaPath("edition").SetCustomSuppressDiff(common.EqualFoldDiffSuppress)
s.SchemaPath("storage").SetCustomSuppressDiff(suppressStorageDiff)

// As of 6th Nov 2024, the DLT API only normalizes the catalog name when creating
// a pipeline. So we only ignore the equal fold diff for the catalog name and not other
// UC resources like target, schema or ingestion_definition.connection_name.
s.SchemaPath("catalog").SetCustomSuppressDiff(common.EqualFoldDiffSuppress)

// Deprecated fields
s.SchemaPath("cluster", "init_scripts", "dbfs").SetDeprecated(clusters.DbfsDeprecationWarning)
s.SchemaPath("library", "whl").SetDeprecated("The 'whl' field is deprecated")
Expand Down

0 comments on commit 11c26ef

Please sign in to comment.