diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index ab3878266..7ead3fe7f 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -119,16 +119,16 @@ We are migrating the resource from SDKv2 to Plugin Framework provider and hence
 - `sdkv2`: Contains the changes specific to SDKv2. This package shouldn't depend on pluginfw or common.
 
 ### Adding a new resource
-1. Check if the directory for this particular resource exists under `internal/providers/pluginfw/resources`, if not create the directory eg: `cluster`, `volume` etc... Please note: Resources and Data sources are organized under the same package for that service.
-2. Create a file with resource_resource-name.go and write the CRUD methods, schema for that resource. For reference, please take a look at existing resources eg: `resource_quality_monitor.go`
+1. Check if the directory for this particular resource exists under `internal/providers/pluginfw/products`; if not, create it, e.g. `cluster`, `volume`, etc. Please note: Resources and Data sources are organized under the same package for that service.
+2. Create a file named `resource_resource-name.go` and write the CRUD methods and schema for that resource. For reference, please take a look at existing resources, e.g. `resource_quality_monitor.go`. Make sure to set the user agent in all the CRUD methods. In `Metadata()`, use `GetDatabricksProductionName()` if the resource is to be used as the default; otherwise use `GetDatabricksStagingName()`, which suffixes the name with `_pluginframework`.
 3. Create a file with `resource_resource-name_acc_test.go` and add integration tests here.
 4. Create a file with `resource_resource-name_test.go` and add unit tests here. Note: Please make sure to abstract specific method of the resource so they are unit test friendly and not testing internal part of terraform plugin framework library. You can compare the diagnostics, for example: please take a look at: `data_cluster_test.go`
 5. Add the resource under `internal/providers/pluginfw/pluginfw.go` in `Resources()` method. Please update the list so that it stays in alphabetically sorted order.
 6. Create a PR and send it for review.
 
 ### Adding a new data source
-1. Check if the directory for this particular datasource exists under `internal/providers/pluginfw/resources`, if not create the directory eg: `cluster`, `volume` etc... Please note: Resources and Data sources are organized under the same package for that service.
-2. Create a file with `data_resource-name.go` and write the CRUD methods, schema for that data source. For reference, please take a look at existing data sources eg: `data_cluster.go`
+1. Check if the directory for this particular data source exists under `internal/providers/pluginfw/products`; if not, create it, e.g. `cluster`, `volume`, etc. Please note: Resources and Data sources are organized under the same package for that service.
+2. Create a file named `data_resource-name.go` and write the `Read()` method and schema for that data source. For reference, please take a look at existing data sources, e.g. `data_cluster.go`. Make sure to set the user agent in the `Read()` method. In `Metadata()`, use `GetDatabricksProductionName()` if the data source is to be used as the default; otherwise use `GetDatabricksStagingName()`, which suffixes the name with `_pluginframework`.
 3. Create a file with `data_resource-name_acc_test.go` and add integration tests here.
 4. Create a file with `data_resource-name_test.go` and add unit tests here. Note: Please make sure to abstract specific method of the resource so they are unit test friendly and not testing internal part of terraform plugin framework library. You can compare the diagnostics, for example: please take a look at: `data_cluster_test.go`
 5. Add the resource under `internal/providers/pluginfw/pluginfw.go` in `DataSources()` method. Please update the list so that it stays in alphabetically sorted order.
@@ -141,7 +141,7 @@ Ideally there shouldn't be any behaviour change when migrating a resource or dat
 
 ### Code Organization
-Each resource and data source should be defined in package `internal/providers/pluginfw/resources/`, e.g.: `internal/providers/pluginfw/resources/volume` package will contain both resource, data sources and other utils specific to volumes. Tests (both unit and integration tests) will also remain in this package.
+Each resource and data source should be defined in the package `internal/providers/pluginfw/products/`, e.g. the `internal/providers/pluginfw/products/volume` package will contain the resource, data sources and other utils specific to volumes. Tests (both unit and integration tests) will also remain in this package.
 
 Note: Only Docs will stay under root docs/ directory.
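The CONTRIBUTING.md steps above reference the naming and user-agent helpers without showing them in context. Below is a minimal, hypothetical sketch of what steps 1–2 produce; the helper calls (`GetDatabricksProductionName()`, `GetDatabricksStagingName()`, `SetUserAgentInResourceContext()`) and the `pluginfwcommon`/`pluginfwcontext` import aliases are the ones used by the real files changed later in this diff, while the `widget` package, constant, and struct names are illustrative only, and the remaining CRUD, `Schema()`, and `Configure()` methods are omitted.

```go
package widget

import (
	"context"

	pluginfwcommon "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/common"
	pluginfwcontext "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/context"
	"github.com/hashicorp/terraform-plugin-framework/resource"
)

// resourceName is the product name; "widget" is hypothetical and used only for illustration.
const resourceName = "widget"

type WidgetResource struct{}

// Metadata sets the Terraform type name. Per the CONTRIBUTING notes above,
// GetDatabricksProductionName would yield "databricks_widget", while
// GetDatabricksStagingName would suffix it as "databricks_widget_pluginframework"
// for resources that are still being rolled out.
func (r *WidgetResource) Metadata(ctx context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) {
	resp.TypeName = pluginfwcommon.GetDatabricksProductionName(resourceName)
}

// Create, like every CRUD method, tags the context with the user agent before
// making any SDK calls.
func (r *WidgetResource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) {
	ctx = pluginfwcontext.SetUserAgentInResourceContext(ctx, resourceName)
	// ... issue workspace client calls with ctx and populate resp ...
}
```

The data source analogue of this pattern (a `dataSourceName` constant, `GetDatabricksProductionName()` or `GetDatabricksStagingName()` in `Metadata()`, and `SetUserAgentInDataSourceContext()` at the top of `Read()`) appears verbatim in the `data_functions.go`, `data_notification_destinations.go`, `data_registered_model.go`, and sharing hunks below.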
diff --git a/clusters/resource_cluster.go b/clusters/resource_cluster.go
index 28672e296..93d8d2fb5 100644
--- a/clusters/resource_cluster.go
+++ b/clusters/resource_cluster.go
@@ -26,26 +26,6 @@ var clusterSchema = resourceClusterSchema()
 var clusterSchemaVersion = 4
 
 const (
-	numWorkerErr = `num_workers may be 0 only for single-node clusters. To create a single node
-cluster please include the following configuration in your cluster configuration:
-
-  spark_conf = {
-    "spark.databricks.cluster.profile" : "singleNode"
-    "spark.master" : "local[*]"
-  }
-
-  custom_tags = {
-    "ResourceClass" = "SingleNode"
-  }
-
-Please note that the Databricks Terraform provider cannot detect if the above configuration
-is defined in a policy used by the cluster. Please define this in the cluster configuration
-itself to create a single node cluster.
-
-For more details please see:
- 1. https://registry.terraform.io/providers/databricks/databricks/latest/docs/resources/cluster#fixed-size-or-autoscaling-cluster
- 2. https://docs.databricks.com/clusters/single-node.html`
-
 	unsupportedExceptCreateEditClusterSpecErr = "unsupported type %T, must be one of %scompute.CreateCluster, %scompute.ClusterSpec or %scompute.EditCluster. Please report this issue to the GitHub repo"
 )
@@ -130,39 +110,6 @@ func ZoneDiffSuppress(k, old, new string, d *schema.ResourceData) bool {
 	return false
 }
 
-func Validate(cluster any) error {
-	var profile, master, resourceClass string
-	switch c := cluster.(type) {
-	case compute.CreateCluster:
-		if c.NumWorkers > 0 || c.Autoscale != nil {
-			return nil
-		}
-		profile = c.SparkConf["spark.databricks.cluster.profile"]
-		master = c.SparkConf["spark.master"]
-		resourceClass = c.CustomTags["ResourceClass"]
-	case compute.EditCluster:
-		if c.NumWorkers > 0 || c.Autoscale != nil {
-			return nil
-		}
-		profile = c.SparkConf["spark.databricks.cluster.profile"]
-		master = c.SparkConf["spark.master"]
-		resourceClass = c.CustomTags["ResourceClass"]
-	case compute.ClusterSpec:
-		if c.NumWorkers > 0 || c.Autoscale != nil {
-			return nil
-		}
-		profile = c.SparkConf["spark.databricks.cluster.profile"]
-		master = c.SparkConf["spark.master"]
-		resourceClass = c.CustomTags["ResourceClass"]
-	default:
-		return fmt.Errorf(unsupportedExceptCreateEditClusterSpecErr, cluster, "", "", "")
-	}
-	if profile == "singleNode" && strings.HasPrefix(master, "local") && resourceClass == "SingleNode" {
-		return nil
-	}
-	return errors.New(numWorkerErr)
-}
-
 // This method is a duplicate of ModifyRequestOnInstancePool() in clusters/clusters_api.go that uses Go SDK.
 // Long term, ModifyRequestOnInstancePool() in clusters_api.go will be removed once all the resources using clusters are migrated to Go SDK.
 func ModifyRequestOnInstancePool(cluster any) error {
@@ -443,9 +390,6 @@ func resourceClusterCreate(ctx context.Context, d *schema.ResourceData, c *commo
 	clusters := w.Clusters
 	var createClusterRequest compute.CreateCluster
 	common.DataToStructPointer(d, clusterSchema, &createClusterRequest)
-	if err := Validate(createClusterRequest); err != nil {
-		return err
-	}
 	if err = ModifyRequestOnInstancePool(&createClusterRequest); err != nil {
 		return err
 	}
@@ -596,9 +540,6 @@ func resourceClusterUpdate(ctx context.Context, d *schema.ResourceData, c *commo
 
 	if hasClusterConfigChanged(d) {
 		log.Printf("[DEBUG] Cluster state has changed!")
-		if err := Validate(cluster); err != nil {
-			return err
-		}
 		if err = ModifyRequestOnInstancePool(&cluster); err != nil {
 			return err
 		}
diff --git a/clusters/resource_cluster_test.go b/clusters/resource_cluster_test.go
index 1ef37126d..00e5e1dfd 100644
--- a/clusters/resource_cluster_test.go
+++ b/clusters/resource_cluster_test.go
@@ -1630,22 +1630,6 @@ func TestResourceClusterCreate_SingleNode(t *testing.T) {
 	assert.NoError(t, err)
 	assert.Equal(t, 0, d.Get("num_workers"))
 }
-
-func TestResourceClusterCreate_SingleNodeFail(t *testing.T) {
-	_, err := qa.ResourceFixture{
-		Create:   true,
-		Resource: ResourceCluster(),
-		State: map[string]any{
-			"autotermination_minutes": 120,
-			"cluster_name":            "Single Node Cluster",
-			"spark_version":           "7.3.x-scala12",
-			"node_type_id":            "Standard_F4s",
-			"is_pinned":               false,
-		},
-	}.Apply(t)
-	assert.EqualError(t, err, numWorkerErr)
-}
-
 func TestResourceClusterCreate_NegativeNumWorkers(t *testing.T) {
 	_, err := qa.ResourceFixture{
 		Create:   true,
@@ -1662,27 +1646,59 @@ func TestResourceClusterCreate_NegativeNumWorkers(t *testing.T) {
 	require.Equal(t, true, strings.Contains(err.Error(), "expected num_workers to be at least (0)"))
 }
 
-func TestResourceClusterUpdate_FailNumWorkersZero(t *testing.T) {
-	_, err := qa.ResourceFixture{
-		ID:       "abc",
-		Update:   true,
-		Resource: ResourceCluster(),
-		InstanceState: map[string]string{
-			"autotermination_minutes": "15",
-			"cluster_name":            "Shared Autoscaling",
-			"spark_version":           "7.1-scala12",
-			"node_type_id":            "i3.xlarge",
-			"num_workers":             "100",
+func TestResourceClusterCreate_NumWorkersIsZero(t *testing.T) {
+	d, err := qa.ResourceFixture{
+		Fixtures: []qa.HTTPFixture{
+			nothingPinned,
+			{
+				Method:   "POST",
+				Resource: "/api/2.1/clusters/create",
+				ExpectedRequest: compute.CreateCluster{
+					NumWorkers:             0,
+					ClusterName:            "Zero workers cluster",
+					SparkVersion:           "7.3.x-scala12",
+					NodeTypeId:             "Standard_F4s",
+					AutoterminationMinutes: 120,
+					ForceSendFields:        []string{"NumWorkers"},
+				},
+				Response: compute.ClusterDetails{
+					ClusterId: "abc",
+					State:     compute.StateRunning,
+				},
+			},
+			{
+				Method:       "GET",
+				ReuseRequest: true,
+				Resource:     "/api/2.1/clusters/get?cluster_id=abc",
+				Response: compute.ClusterDetails{
+					ClusterId:              "abc",
+					ClusterName:            "Zero workers cluster",
+					SparkVersion:           "7.3.x-scala12",
+					NodeTypeId:             "Standard_F4s",
+					AutoterminationMinutes: 120,
+					State:                  compute.StateRunning,
+				},
+			},
+			{
+				Method:   "GET",
+				Resource: "/api/2.0/libraries/cluster-status?cluster_id=abc",
+				Response: compute.ClusterLibraryStatuses{
+					LibraryStatuses: []compute.LibraryFullStatus{},
+				},
+			},
+		},
 		},
+		Create:   true,
+		Resource: ResourceCluster(),
 		State: map[string]any{
-			"autotermination_minutes": 15,
-			"cluster_name":            "Shared Autoscaling",
-			"spark_version":           "7.1-scala12",
-			"node_type_id":            "i3.xlarge",
-			"num_workers":             0,
+			"autotermination_minutes": 120,
+			"cluster_name":            "Zero workers cluster",
+			"spark_version":           "7.3.x-scala12",
+			"node_type_id":            "Standard_F4s",
+			"is_pinned":               false,
 		},
 	}.Apply(t)
-	assert.EqualError(t, err, numWorkerErr)
+	assert.NoError(t, err)
+	assert.Equal(t, 0, d.Get("num_workers"))
 }
 
 func TestModifyClusterRequestAws(t *testing.T) {
diff --git a/internal/providers/pluginfw/pluginfw_rollout_utils.go b/internal/providers/pluginfw/pluginfw_rollout_utils.go
index 90b782a51..87e75d01d 100644
--- a/internal/providers/pluginfw/pluginfw_rollout_utils.go
+++ b/internal/providers/pluginfw/pluginfw_rollout_utils.go
@@ -12,14 +12,14 @@ import (
 	"slices"
 	"strings"
 
-	"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/resources/catalog"
-	"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/resources/cluster"
-	"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/resources/library"
-	"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/resources/notificationdestinations"
-	"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/resources/qualitymonitor"
-	"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/resources/registered_model"
-	"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/resources/sharing"
-	"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/resources/volume"
+	"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/products/catalog"
+	"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/products/cluster"
+	"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/products/library"
+	"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/products/notificationdestinations"
+	"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/products/qualitymonitor"
+	"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/products/registered_model"
+ "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/products/sharing" + "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/products/volume" "github.com/hashicorp/terraform-plugin-framework/datasource" "github.com/hashicorp/terraform-plugin-framework/resource" ) diff --git a/internal/providers/pluginfw/resources/catalog/data_functions.go b/internal/providers/pluginfw/products/catalog/data_functions.go similarity index 91% rename from internal/providers/pluginfw/resources/catalog/data_functions.go rename to internal/providers/pluginfw/products/catalog/data_functions.go index 6837800b5..9f6c3aba6 100644 --- a/internal/providers/pluginfw/resources/catalog/data_functions.go +++ b/internal/providers/pluginfw/products/catalog/data_functions.go @@ -8,6 +8,7 @@ import ( "github.com/databricks/databricks-sdk-go/service/catalog" "github.com/databricks/terraform-provider-databricks/common" pluginfwcommon "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/common" + pluginfwcontext "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/context" "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/converters" "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/tfschema" "github.com/databricks/terraform-provider-databricks/internal/service/catalog_tf" @@ -16,6 +17,8 @@ import ( "github.com/hashicorp/terraform-plugin-framework/types" ) +const dataSourceName = "functions" + func DataSourceFunctions() datasource.DataSource { return &FunctionsDataSource{} } @@ -34,7 +37,7 @@ type FunctionsData struct { } func (d *FunctionsDataSource) Metadata(ctx context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) { - resp.TypeName = "databricks_functions" + resp.TypeName = pluginfwcommon.GetDatabricksProductionName(dataSourceName) } func (d *FunctionsDataSource) Schema(ctx context.Context, req datasource.SchemaRequest, resp *datasource.SchemaResponse) { @@ -52,6 +55,7 @@ func (d *FunctionsDataSource) Configure(_ context.Context, req datasource.Config } func (d *FunctionsDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) { + ctx = pluginfwcontext.SetUserAgentInDataSourceContext(ctx, dataSourceName) w, diags := d.Client.GetWorkspaceClient() resp.Diagnostics.Append(diags...) 
 	if resp.Diagnostics.HasError() {
diff --git a/internal/providers/pluginfw/resources/cluster/data_cluster.go b/internal/providers/pluginfw/products/cluster/data_cluster.go
similarity index 100%
rename from internal/providers/pluginfw/resources/cluster/data_cluster.go
rename to internal/providers/pluginfw/products/cluster/data_cluster.go
diff --git a/internal/providers/pluginfw/resources/cluster/data_cluster_acc_test.go b/internal/providers/pluginfw/products/cluster/data_cluster_acc_test.go
similarity index 100%
rename from internal/providers/pluginfw/resources/cluster/data_cluster_acc_test.go
rename to internal/providers/pluginfw/products/cluster/data_cluster_acc_test.go
diff --git a/internal/providers/pluginfw/resources/cluster/data_cluster_test.go b/internal/providers/pluginfw/products/cluster/data_cluster_test.go
similarity index 100%
rename from internal/providers/pluginfw/resources/cluster/data_cluster_test.go
rename to internal/providers/pluginfw/products/cluster/data_cluster_test.go
diff --git a/internal/providers/pluginfw/resources/library/resource_library.go b/internal/providers/pluginfw/products/library/resource_library.go
similarity index 100%
rename from internal/providers/pluginfw/resources/library/resource_library.go
rename to internal/providers/pluginfw/products/library/resource_library.go
diff --git a/internal/providers/pluginfw/resources/library/resource_library_acc_test.go b/internal/providers/pluginfw/products/library/resource_library_acc_test.go
similarity index 100%
rename from internal/providers/pluginfw/resources/library/resource_library_acc_test.go
rename to internal/providers/pluginfw/products/library/resource_library_acc_test.go
diff --git a/internal/providers/pluginfw/resources/notificationdestinations/data_notification_destinations.go b/internal/providers/pluginfw/products/notificationdestinations/data_notification_destinations.go
similarity index 93%
rename from internal/providers/pluginfw/resources/notificationdestinations/data_notification_destinations.go
rename to internal/providers/pluginfw/products/notificationdestinations/data_notification_destinations.go
index 441877fdd..8b48a74e3 100755
--- a/internal/providers/pluginfw/resources/notificationdestinations/data_notification_destinations.go
+++ b/internal/providers/pluginfw/products/notificationdestinations/data_notification_destinations.go
@@ -9,6 +9,7 @@ import (
 	"github.com/databricks/databricks-sdk-go/service/settings"
 	"github.com/databricks/terraform-provider-databricks/common"
 	pluginfwcommon "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/common"
+	pluginfwcontext "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/context"
 	"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/converters"
 	"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/tfschema"
 	"github.com/databricks/terraform-provider-databricks/internal/service/settings_tf"
@@ -18,6 +19,8 @@ import (
 	"github.com/hashicorp/terraform-plugin-framework/types"
 )
 
+const dataSourceName = "notification_destinations"
+
 func DataSourceNotificationDestinations() datasource.DataSource {
 	return &NotificationDestinationsDataSource{}
 }
@@ -35,7 +38,7 @@ type NotificationDestinationsInfo struct {
 }
 
 func (d *NotificationDestinationsDataSource) Metadata(ctx context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) {
-	resp.TypeName = "databricks_notification_destinations"
+	resp.TypeName = pluginfwcommon.GetDatabricksProductionName(dataSourceName)
 }
 
 func (d *NotificationDestinationsDataSource) Schema(ctx context.Context, req datasource.SchemaRequest, resp *datasource.SchemaResponse) {
@@ -73,6 +76,7 @@ func AppendDiagAndCheckErrors(resp *datasource.ReadResponse, diags diag.Diagnost
 }
 
 func (d *NotificationDestinationsDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) {
+	ctx = pluginfwcontext.SetUserAgentInDataSourceContext(ctx, dataSourceName)
 	w, diags := d.Client.GetWorkspaceClient()
 	if AppendDiagAndCheckErrors(resp, diags) {
 		return
diff --git a/internal/providers/pluginfw/resources/notificationdestinations/data_notification_destinations_acc_test.go b/internal/providers/pluginfw/products/notificationdestinations/data_notification_destinations_acc_test.go
similarity index 100%
rename from internal/providers/pluginfw/resources/notificationdestinations/data_notification_destinations_acc_test.go
rename to internal/providers/pluginfw/products/notificationdestinations/data_notification_destinations_acc_test.go
diff --git a/internal/providers/pluginfw/resources/notificationdestinations/data_notification_destinations_test.go b/internal/providers/pluginfw/products/notificationdestinations/data_notification_destinations_test.go
similarity index 100%
rename from internal/providers/pluginfw/resources/notificationdestinations/data_notification_destinations_test.go
rename to internal/providers/pluginfw/products/notificationdestinations/data_notification_destinations_test.go
diff --git a/internal/providers/pluginfw/resources/qualitymonitor/resource_quality_monitor.go b/internal/providers/pluginfw/products/qualitymonitor/resource_quality_monitor.go
similarity index 100%
rename from internal/providers/pluginfw/resources/qualitymonitor/resource_quality_monitor.go
rename to internal/providers/pluginfw/products/qualitymonitor/resource_quality_monitor.go
diff --git a/internal/providers/pluginfw/resources/qualitymonitor/resource_quality_monitor_acc_test.go b/internal/providers/pluginfw/products/qualitymonitor/resource_quality_monitor_acc_test.go
similarity index 100%
rename from internal/providers/pluginfw/resources/qualitymonitor/resource_quality_monitor_acc_test.go
rename to internal/providers/pluginfw/products/qualitymonitor/resource_quality_monitor_acc_test.go
diff --git a/internal/providers/pluginfw/resources/registered_model/data_registered_model.go b/internal/providers/pluginfw/products/registered_model/data_registered_model.go
similarity index 91%
rename from internal/providers/pluginfw/resources/registered_model/data_registered_model.go
rename to internal/providers/pluginfw/products/registered_model/data_registered_model.go
index 64ed516e5..980c46ed1 100644
--- a/internal/providers/pluginfw/resources/registered_model/data_registered_model.go
+++ b/internal/providers/pluginfw/products/registered_model/data_registered_model.go
@@ -8,6 +8,7 @@ import (
 	"github.com/databricks/databricks-sdk-go/service/catalog"
 	"github.com/databricks/terraform-provider-databricks/common"
 	pluginfwcommon "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/common"
+	pluginfwcontext "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/context"
 	"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/converters"
 	"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/tfschema"
 	"github.com/databricks/terraform-provider-databricks/internal/service/catalog_tf"
@@ -16,6 +17,8 @@ import (
 	"github.com/hashicorp/terraform-plugin-framework/types"
 )
 
+const dataSourceName = "registered_model"
+
 func DataSourceRegisteredModel() datasource.DataSource {
 	return &RegisteredModelDataSource{}
 }
@@ -34,7 +37,7 @@ type RegisteredModelData struct {
 }
 
 func (d *RegisteredModelDataSource) Metadata(ctx context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) {
-	resp.TypeName = "databricks_registered_model"
+	resp.TypeName = pluginfwcommon.GetDatabricksProductionName(dataSourceName)
 }
 
 func (d *RegisteredModelDataSource) Schema(ctx context.Context, req datasource.SchemaRequest, resp *datasource.SchemaResponse) {
@@ -52,6 +55,7 @@ func (d *RegisteredModelDataSource) Configure(_ context.Context, req datasource.
 }
 
 func (d *RegisteredModelDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) {
+	ctx = pluginfwcontext.SetUserAgentInDataSourceContext(ctx, dataSourceName)
 	w, diags := d.Client.GetWorkspaceClient()
 	resp.Diagnostics.Append(diags...)
 	if resp.Diagnostics.HasError() {
diff --git a/internal/providers/pluginfw/resources/sharing/data_share.go b/internal/providers/pluginfw/products/sharing/data_share.go
similarity index 89%
rename from internal/providers/pluginfw/resources/sharing/data_share.go
rename to internal/providers/pluginfw/products/sharing/data_share.go
index f96d56ac1..5855283c1 100644
--- a/internal/providers/pluginfw/resources/sharing/data_share.go
+++ b/internal/providers/pluginfw/products/sharing/data_share.go
@@ -7,6 +7,7 @@ import (
 	"github.com/databricks/databricks-sdk-go/service/sharing"
 	"github.com/databricks/terraform-provider-databricks/common"
 	pluginfwcommon "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/common"
+	pluginfwcontext "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/context"
 	"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/converters"
 	"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/tfschema"
 	"github.com/databricks/terraform-provider-databricks/internal/service/sharing_tf"
@@ -14,6 +15,8 @@ import (
 	"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
 )
 
+const dataSourceNameShare = "share"
+
 func DataSourceShare() datasource.DataSource {
 	return &ShareDataSource{}
 }
@@ -25,7 +28,7 @@ type ShareDataSource struct {
 }
 
 func (d *ShareDataSource) Metadata(ctx context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) {
-	resp.TypeName = pluginfwcommon.GetDatabricksStagingName("share")
+	resp.TypeName = pluginfwcommon.GetDatabricksStagingName(dataSourceNameShare)
 }
 
 func (d *ShareDataSource) Schema(ctx context.Context, req datasource.SchemaRequest, resp *datasource.SchemaResponse) {
@@ -43,6 +46,7 @@ func (d *ShareDataSource) Configure(_ context.Context, req datasource.ConfigureR
 }
 
 func (d *ShareDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) {
+	ctx = pluginfwcontext.SetUserAgentInDataSourceContext(ctx, dataSourceNameShare)
 	w, diags := d.Client.GetWorkspaceClient()
 	resp.Diagnostics.Append(diags...)
 	if resp.Diagnostics.HasError() {
diff --git a/internal/providers/pluginfw/resources/sharing/data_shares.go b/internal/providers/pluginfw/products/sharing/data_shares.go
similarity index 87%
rename from internal/providers/pluginfw/resources/sharing/data_shares.go
rename to internal/providers/pluginfw/products/sharing/data_shares.go
index 175362119..7b996ab33 100644
--- a/internal/providers/pluginfw/resources/sharing/data_shares.go
+++ b/internal/providers/pluginfw/products/sharing/data_shares.go
@@ -8,11 +8,14 @@ import (
 	"github.com/databricks/databricks-sdk-go/service/sharing"
 	"github.com/databricks/terraform-provider-databricks/common"
 	pluginfwcommon "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/common"
+	pluginfwcontext "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/context"
 	"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/tfschema"
 	"github.com/hashicorp/terraform-plugin-framework/datasource"
 	"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
 )
 
+const dataSourceNameShares = "shares"
+
 type SharesList struct {
 	Shares []types.String `tfsdk:"shares" tf:"computed,optional,slice_set"`
 }
@@ -28,7 +31,7 @@ type SharesDataSource struct {
 }
 
 func (d *SharesDataSource) Metadata(ctx context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) {
-	resp.TypeName = pluginfwcommon.GetDatabricksStagingName("shares")
+	resp.TypeName = pluginfwcommon.GetDatabricksStagingName(dataSourceNameShares)
 }
 
 func (d *SharesDataSource) Schema(ctx context.Context, req datasource.SchemaRequest, resp *datasource.SchemaResponse) {
@@ -46,6 +49,7 @@ func (d *SharesDataSource) Configure(_ context.Context, req datasource.Configure
 }
 
 func (d *SharesDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) {
+	ctx = pluginfwcontext.SetUserAgentInDataSourceContext(ctx, dataSourceNameShares)
 	w, diags := d.Client.GetWorkspaceClient()
 	resp.Diagnostics.Append(diags...)
 	if resp.Diagnostics.HasError() {
diff --git a/internal/providers/pluginfw/resources/sharing/data_shares_acc_test.go b/internal/providers/pluginfw/products/sharing/data_shares_acc_test.go
similarity index 100%
rename from internal/providers/pluginfw/resources/sharing/data_shares_acc_test.go
rename to internal/providers/pluginfw/products/sharing/data_shares_acc_test.go
diff --git a/internal/providers/pluginfw/resources/sharing/resource_acc_test.go b/internal/providers/pluginfw/products/sharing/resource_acc_test.go
similarity index 100%
rename from internal/providers/pluginfw/resources/sharing/resource_acc_test.go
rename to internal/providers/pluginfw/products/sharing/resource_acc_test.go
diff --git a/internal/providers/pluginfw/resources/sharing/resource_share.go b/internal/providers/pluginfw/products/sharing/resource_share.go
similarity index 99%
rename from internal/providers/pluginfw/resources/sharing/resource_share.go
rename to internal/providers/pluginfw/products/sharing/resource_share.go
index b96cd0e97..e86847e0c 100644
--- a/internal/providers/pluginfw/resources/sharing/resource_share.go
+++ b/internal/providers/pluginfw/products/sharing/resource_share.go
@@ -164,7 +164,6 @@ func (d *ShareResource) Configure(ctx context.Context, req resource.ConfigureReq
 
 func (r *ShareResource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) {
 	ctx = pluginfwcontext.SetUserAgentInResourceContext(ctx, resourceName)
-
 	w, diags := r.Client.GetWorkspaceClient()
 	resp.Diagnostics.Append(diags...)
 	if resp.Diagnostics.HasError() {
diff --git a/internal/providers/pluginfw/resources/volume/data_volumes.go b/internal/providers/pluginfw/products/volume/data_volumes.go
similarity index 100%
rename from internal/providers/pluginfw/resources/volume/data_volumes.go
rename to internal/providers/pluginfw/products/volume/data_volumes.go
diff --git a/internal/providers/pluginfw/resources/volume/data_volumes_acc_test.go b/internal/providers/pluginfw/products/volume/data_volumes_acc_test.go
similarity index 100%
rename from internal/providers/pluginfw/resources/volume/data_volumes_acc_test.go
rename to internal/providers/pluginfw/products/volume/data_volumes_acc_test.go