diff --git a/internal/services/mssql/mssql_database_resource.go b/internal/services/mssql/mssql_database_resource.go
index cd0c6efec8a8..88f8b572485b 100644
--- a/internal/services/mssql/mssql_database_resource.go
+++ b/internal/services/mssql/mssql_database_resource.go
@@ -7,6 +7,7 @@ import (
 	"context"
 	"fmt"
 	"log"
+	"math"
 	"strings"
 	"time"
 
@@ -389,8 +390,13 @@ func resourceMsSqlDatabaseCreate(d *pluginsdk.ResourceData, meta interface{}) er
 	if v, ok := d.GetOk("max_size_gb"); ok {
 		// `max_size_gb` is Computed, so has a value after the first run
 		if createMode != string(databases.CreateModeOnlineSecondary) && createMode != string(databases.CreateModeSecondary) {
-			input.Properties.MaxSizeBytes = pointer.To(int64(v.(int)) * 1073741824)
+			v, err := calculateMaxSizeBytes(v.(float64))
+			if err != nil {
+				return err
+			}
+			input.Properties.MaxSizeBytes = v
 		}
+		// `max_size_gb` only registers a change if it is configured
 		if d.HasChange("max_size_gb") && (createMode == string(databases.CreateModeOnlineSecondary) || createMode == string(databases.CreateModeSecondary)) {
 			return fmt.Errorf("it is not possible to change maximum size nor advised to configure maximum size in secondary create mode for %s", id)
 		}
@@ -784,9 +790,12 @@ func resourceMsSqlDatabaseUpdate(d *pluginsdk.ResourceData, meta interface{}) er
 
 	if v, ok := d.GetOk("max_size_gb"); ok {
 		// `max_size_gb` is Computed, so has a value after the first run
-		if createMode != string(databases.CreateModeOnlineSecondary) && createMode != string(databases.CreateModeSecondary) {
-			props.MaxSizeBytes = pointer.To(int64(v.(int)) * 1073741824)
+		v, err := calculateMaxSizeBytes(v.(float64))
+		if err != nil {
+			return err
 		}
+		props.MaxSizeBytes = v
+		// `max_size_gb` only registers a change if it is configured
 		if d.HasChange("max_size_gb") && (createMode == string(databases.CreateModeOnlineSecondary) || createMode == string(databases.CreateModeSecondary)) {
 			return fmt.Errorf("it is not possible to change maximum size nor advised to configure maximum size in secondary create mode for %s", id)
 		}
@@ -1181,9 +1190,7 @@ func resourceMsSqlDatabaseRead(d *pluginsdk.ResourceData, meta interface{}) erro
 		d.Set("license_type", d.Get("license_type").(string))
 	}
 
-	if props.MaxSizeBytes != nil {
-		d.Set("max_size_gb", int32((*props.MaxSizeBytes)/int64(1073741824)))
-	}
+	d.Set("max_size_gb", d.Get("max_size_gb").(float64))
 
 	if props.CurrentServiceObjectiveName != nil {
 		skuName = *props.CurrentServiceObjectiveName
@@ -1599,10 +1606,10 @@ func resourceMsSqlDatabaseSchema() map[string]*pluginsdk.Schema {
 		"short_term_retention_policy": helper.ShortTermRetentionPolicySchema(),
 
 		"max_size_gb": {
-			Type:         pluginsdk.TypeInt,
+			Type:         pluginsdk.TypeFloat,
 			Optional:     true,
 			Computed:     true,
-			ValidateFunc: validation.IntBetween(1, 4096),
+			ValidateFunc: validation.FloatBetween(0.1, 4096),
 		},
 
 		"min_capacity": {
@@ -1884,3 +1891,28 @@ func resourceMsSqlDatabaseSchema() map[string]*pluginsdk.Schema {
 
 	return resource
 }
+
+func calculateMaxSizeBytes(v float64) (*int64, error) {
+	maxSizeBytes := int64(0)
+	if isPositiveInteger(v) {
+		maxSizeBytes = int64(math.Round(v)) * 1073741824
+	} else {
+		// When `max_size_gb` is below 1GB, the API only accepts 100MB (104857600 bytes) or 500MB (524288000 bytes).
+		// Since 100MB != 0.1 * 1073741824 and 500MB != 0.5 * 1073741824, the byte values are hard-coded when `max_size_gb` is `0.1` or `0.5`.
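+		// Rounding v*10 (rather than truncating) keeps these comparisons robust to
+		// float64 representation error, since 0.1 has no exact binary representation.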
+		if int64(math.Round(v*10)) == 1 {
+			maxSizeBytes = 104857600
+		} else if int64(math.Round(v*10)) == 5 {
+			maxSizeBytes = 524288000
+		} else {
+			return nil, fmt.Errorf("`max_size_gb` must be `0.1`, `0.5`, or an integer greater than or equal to `1`")
+		}
+	}
+	return pointer.To(maxSizeBytes), nil
+}
+
+func isPositiveInteger(num float64) bool {
+	// reports whether `num` is an integer greater than or equal to 1
+	return num >= 1 && num == math.Floor(num)
+}
diff --git a/internal/services/mssql/mssql_database_resource_test.go b/internal/services/mssql/mssql_database_resource_test.go
index 389292857ddf..0e8faf2b61d6 100644
--- a/internal/services/mssql/mssql_database_resource_test.go
+++ b/internal/services/mssql/mssql_database_resource_test.go
@@ -38,6 +38,35 @@ func TestAccMsSqlDatabase_basic(t *testing.T) {
 	})
 }
 
+func TestAccMsSqlDatabase_maxSizeGB(t *testing.T) {
+	data := acceptance.BuildTestData(t, "azurerm_mssql_database", "test")
+	r := MsSqlDatabaseResource{}
+
+	data.ResourceTest(t, r, []acceptance.TestStep{
+		{
+			Config: r.maxSizeGB(data, 0.1),
+			Check: acceptance.ComposeTestCheckFunc(
+				check.That(data.ResourceName).ExistsInAzure(r),
+			),
+		},
+		data.ImportStep("max_size_gb"),
+		{
+			Config: r.maxSizeGB(data, 0.5),
+			Check: acceptance.ComposeTestCheckFunc(
+				check.That(data.ResourceName).ExistsInAzure(r),
+			),
+		},
+		data.ImportStep("max_size_gb"),
+		{
+			Config: r.maxSizeGB(data, 1),
+			Check: acceptance.ComposeTestCheckFunc(
+				check.That(data.ResourceName).ExistsInAzure(r),
+			),
+		},
+		data.ImportStep("max_size_gb"),
+	})
+}
+
 func TestAccMsSqlDatabase_free(t *testing.T) {
 	data := acceptance.BuildTestData(t, "azurerm_mssql_database", "test")
 	r := MsSqlDatabaseResource{}
@@ -96,7 +125,7 @@ func TestAccMsSqlDatabase_complete(t *testing.T) {
 				check.That(data.ResourceName).Key("tags.ENV").HasValue("Test"),
 			),
 		},
-		data.ImportStep("sample_name"),
+		data.ImportStep("sample_name", "max_size_gb"),
 		{
 			Config: r.update(data),
 			Check: acceptance.ComposeTestCheckFunc(
@@ -108,7 +137,7 @@ func TestAccMsSqlDatabase_complete(t *testing.T) {
 				check.That(data.ResourceName).Key("tags.ENV").HasValue("Staging"),
 			),
 		},
-		data.ImportStep("sample_name"),
+		data.ImportStep("sample_name", "max_size_gb"),
 	})
 }
 
@@ -448,56 +477,56 @@ func TestAccMsSqlDatabase_scaleReplicaSet(t *testing.T) {
 				check.That(data.ResourceName).ExistsInAzure(r),
 			),
 		},
-		data.ImportStep("sample_name", "license_type"),
+		data.ImportStep("sample_name", "license_type", "max_size_gb"),
 		{
 			Config: r.scaleReplicaSet(data, "P2"),
 			Check: acceptance.ComposeTestCheckFunc(
 				check.That(data.ResourceName).ExistsInAzure(r),
 			),
 		},
-		data.ImportStep("sample_name", "license_type"),
+		data.ImportStep("sample_name", "license_type", "max_size_gb"),
 		{
 			Config: r.scaleReplicaSet(data, "GP_Gen5_2"),
 			Check: acceptance.ComposeTestCheckFunc(
 				check.That(data.ResourceName).ExistsInAzure(r),
 			),
 		},
-		data.ImportStep("sample_name", "license_type"),
+		data.ImportStep("sample_name", "license_type", "max_size_gb"),
 		{
 			Config: r.scaleReplicaSet(data, "BC_Gen5_2"),
 			Check: acceptance.ComposeTestCheckFunc(
 				check.That(data.ResourceName).ExistsInAzure(r),
 			),
 		},
-		data.ImportStep("sample_name", "license_type"),
+		data.ImportStep("sample_name", "license_type", "max_size_gb"),
 		{
 			Config: r.scaleReplicaSet(data, "GP_Gen5_2"),
 			Check: acceptance.ComposeTestCheckFunc(
 				check.That(data.ResourceName).ExistsInAzure(r),
 			),
 		},
-		data.ImportStep("sample_name", "license_type"),
+		data.ImportStep("sample_name", "license_type", "max_size_gb"),
 		{
 			Config: r.scaleReplicaSet(data, "S2"),
 			Check: acceptance.ComposeTestCheckFunc(
 				check.That(data.ResourceName).ExistsInAzure(r),
 			),
 		},
-		data.ImportStep("sample_name", "license_type"),
+		data.ImportStep("sample_name", "license_type", "max_size_gb"),
 		{
 			Config: r.scaleReplicaSet(data, "Basic"),
 			Check: acceptance.ComposeTestCheckFunc(
 				check.That(data.ResourceName).ExistsInAzure(r),
 			),
 		},
-		data.ImportStep("sample_name", "license_type"),
+		data.ImportStep("sample_name", "license_type", "max_size_gb"),
 		{
 			Config: r.scaleReplicaSet(data, "S1"),
 			Check: acceptance.ComposeTestCheckFunc(
 				check.That(data.ResourceName).ExistsInAzure(r),
 			),
 		},
-		data.ImportStep("sample_name", "license_type"),
+		data.ImportStep("sample_name", "license_type", "max_size_gb"),
 	})
 }
 
@@ -515,7 +544,7 @@ func TestAccMsSqlDatabase_scaleReplicaSetWithFailovergroup(t *testing.T) {
 				check.That(data.ResourceName).Key("sku_name").HasValue("GP_Gen5_2"),
 			),
 		},
-		data.ImportStep(),
+		data.ImportStep("max_size_gb"),
 		{
 			Config: r.scaleReplicaSetWithFailovergroup(data, "GP_Gen5_8", 25),
 			Check: acceptance.ComposeTestCheckFunc(
@@ -525,7 +554,7 @@ func TestAccMsSqlDatabase_scaleReplicaSetWithFailovergroup(t *testing.T) {
 				check.That(data.ResourceName).Key("sku_name").HasValue("GP_Gen5_8"),
 			),
 		},
-		data.ImportStep(),
+		data.ImportStep("max_size_gb"),
 		{
 			Config: r.scaleReplicaSetWithFailovergroup(data, "GP_Gen5_2", 5),
 			Check: acceptance.ComposeTestCheckFunc(
@@ -535,7 +564,7 @@ func TestAccMsSqlDatabase_scaleReplicaSetWithFailovergroup(t *testing.T) {
 				check.That(data.ResourceName).Key("sku_name").HasValue("GP_Gen5_2"),
 			),
 		},
-		data.ImportStep(),
+		data.ImportStep("max_size_gb"),
 	})
 }
 
@@ -607,7 +636,7 @@ func TestAccMsSqlDatabase_threatDetectionPolicy(t *testing.T) {
 				check.That(data.ResourceName).Key("threat_detection_policy.0.email_account_admins").HasValue("Enabled"),
 			),
 		},
-		data.ImportStep("sample_name", "threat_detection_policy.0.storage_account_access_key"),
+		data.ImportStep("sample_name", "threat_detection_policy.0.storage_account_access_key", "max_size_gb"),
 		{
 			Config: r.threatDetectionPolicy(data, "Disabled"),
 			Check: acceptance.ComposeTestCheckFunc(
@@ -616,7 +645,7 @@ func TestAccMsSqlDatabase_threatDetectionPolicy(t *testing.T) {
 				check.That(data.ResourceName).Key("threat_detection_policy.0.state").HasValue("Disabled"),
 			),
 		},
-		data.ImportStep("sample_name", "threat_detection_policy.0.storage_account_access_key"),
+		data.ImportStep("sample_name", "threat_detection_policy.0.storage_account_access_key", "max_size_gb"),
 	})
 }
 
@@ -634,7 +663,7 @@ func TestAccMsSqlDatabase_threatDetectionPolicyNoStorage(t *testing.T) {
 				check.That(data.ResourceName).Key("threat_detection_policy.0.storage_endpoint").IsEmpty(),
 			),
 		},
-		data.ImportStep("sample_name", "threat_detection_policy.0.storage_account_access_key"),
+		data.ImportStep("sample_name", "threat_detection_policy.0.storage_account_access_key", "max_size_gb"),
 		{
 			Config: r.threatDetectionPolicy(data, "Enabled"),
 			Check: acceptance.ComposeTestCheckFunc(
@@ -2374,3 +2403,16 @@ resource "azurerm_mssql_database" "test" {
 }
 `, r.template(data), data.RandomInteger)
 }
+
+func (r MsSqlDatabaseResource) maxSizeGB(data acceptance.TestData, maxSizeGb float64) string {
+	return fmt.Sprintf(`
+%[1]s
+
+resource "azurerm_mssql_database" "test" {
+  name        = "acctest-db-%[2]d"
+  server_id   = azurerm_mssql_server.test.id
+  sku_name    = "Basic"
+  max_size_gb = %[3]f
+}
+`, r.template(data), data.RandomInteger, maxSizeGb)
+}
diff --git a/website/docs/r/mssql_database.html.markdown b/website/docs/r/mssql_database.html.markdown
index c82575621f28..92c7323d81b8 100644
--- a/website/docs/r/mssql_database.html.markdown
+++ b/website/docs/r/mssql_database.html.markdown
@@ -204,7 +204,7 @@ The following arguments are supported:
 
 * `max_size_gb` - (Optional) The max size of the database in gigabytes.
 
-~> **NOTE:** This value should not be configured when the `create_mode` is `Secondary` or `OnlineSecondary`, as the sizing of the primary is then used as per [Azure documentation](https://docs.microsoft.com/azure/azure-sql/database/single-database-scale#geo-replicated-database).
+~> **NOTE:** This value should not be configured when the `create_mode` is `Secondary` or `OnlineSecondary`, as the sizing of the primary is then used as per [Azure documentation](https://docs.microsoft.com/azure/azure-sql/database/single-database-scale#geo-replicated-database). `max_size_gb` accepts `0.1`, `0.5`, or an integer greater than or equal to `1`: `0.1` provisions a `100MB` database and `0.5` provisions a `500MB` database.
 
 * `min_capacity` - (Optional) Minimal capacity that database will always have allocated, if not paused. This property is only settable for Serverless databases.
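For readers tracing the conversion logic, below is a minimal table-driven sketch of how `calculateMaxSizeBytes` is expected to behave. It assumes a `_test.go` file inside the internal `mssql` package (the acceptance tests above live in the external test package, so this would be a separate file); the test name and cases are illustrative and not part of this PR:

```go
package mssql

import "testing"

func TestCalculateMaxSizeBytes(t *testing.T) {
	cases := []struct {
		name    string
		input   float64
		want    int64
		wantErr bool
	}{
		// the two sub-1GB sizes the API accepts are hard-coded byte values
		{"100MB special case", 0.1, 104857600, false},
		{"500MB special case", 0.5, 524288000, false},
		// whole gigabytes are a straight multiplication by 1073741824 (2^30)
		{"whole gigabytes", 32, 32 * 1073741824, false},
		// any other fractional value is rejected with an error
		{"unsupported fraction below 1GB", 0.3, 0, true},
		{"non-integer above 1GB", 1.5, 0, true},
	}

	for _, tc := range cases {
		got, err := calculateMaxSizeBytes(tc.input)
		if tc.wantErr {
			if err == nil {
				t.Errorf("%s: expected an error for %v, got none", tc.name, tc.input)
			}
			continue
		}
		if err != nil {
			t.Errorf("%s: unexpected error: %v", tc.name, err)
			continue
		}
		if *got != tc.want {
			t.Errorf("%s: calculateMaxSizeBytes(%v) = %d, want %d", tc.name, tc.input, *got, tc.want)
		}
	}
}
```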