Force send internal_ip_only (GoogleCloudPlatform#11923)
slevenick authored and BBBmau committed Oct 24, 2024
1 parent 04485a2 commit 1198a70
Showing 3 changed files with 20 additions and 10 deletions.
@@ -2177,6 +2177,7 @@ func expandGceClusterConfig(d *schema.ResourceData, config *transport_tpg.Config
}
if v, ok := cfg["internal_ip_only"]; ok {
conf.InternalIpOnly = v.(bool)
+conf.ForceSendFields = append(conf.ForceSendFields, "InternalIpOnly")
}
if v, ok := cfg["metadata"]; ok {
conf.Metadata = tpgresource.ConvertStringMap(v.(map[string]interface{}))
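
Context for the hunk above: the generated Google API Go client encodes request structs with omitempty-style JSON tags, so a boolean the user explicitly sets to false is dropped from the request body unless the field name is listed in ForceSendFields. Appending "InternalIpOnly" whenever the Terraform config sets internal_ip_only ensures the value reaches the Dataproc API even when it is false. Below is a minimal, self-contained sketch of that behavior; the gceConfig struct is a hypothetical stand-in for the real client type, assuming the usual generated-client pattern of omitempty tags plus a ForceSendFields list.

package main

import (
	"encoding/json"
	"fmt"
)

// gceConfig is a stand-in for the generated client struct: the omitempty tag
// mirrors how the real API types drop zero-valued fields when encoding.
type gceConfig struct {
	InternalIpOnly  bool     `json:"internalIpOnly,omitempty"`
	ForceSendFields []string `json:"-"`
}

func main() {
	cfg := gceConfig{InternalIpOnly: false}

	body, _ := json.Marshal(cfg)
	fmt.Println(string(body)) // prints {} - the explicit false never reaches the API

	// The generated client's own MarshalJSON consults ForceSendFields before
	// encoding, which is why the provider appends "InternalIpOnly" here.
	cfg.ForceSendFields = append(cfg.ForceSendFields, "InternalIpOnly")
	fmt.Println(cfg.ForceSendFields) // [InternalIpOnly]
}
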
@@ -28,7 +28,7 @@ func TestAccDataprocCluster_missingZoneGlobalRegion1(t *testing.T) {

rnd := acctest.RandString(t, 10)
acctest.VcrTest(t, resource.TestCase{
-PreCheck: func() { acctest.AccTestPreCheck(t) },
+PreCheck: func() { acctest.AccTestPreCheck(t) },
ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t),
Steps: []resource.TestStep{
{
@@ -44,7 +44,7 @@ func TestAccDataprocCluster_missingZoneGlobalRegion2(t *testing.T) {

rnd := acctest.RandString(t, 10)
acctest.VcrTest(t, resource.TestCase{
-PreCheck: func() { acctest.AccTestPreCheck(t) },
+PreCheck: func() { acctest.AccTestPreCheck(t) },
ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t),
Steps: []resource.TestStep{
{
@@ -559,7 +559,6 @@ func TestAccDataprocCluster_spotWithAuxiliaryNodeGroups(t *testing.T) {
resource.TestCheckResourceAttr("google_dataproc_cluster.with_auxiliary_node_groups", "cluster_config.0.auxiliary_node_groups.0.node_group.0.node_group_config.0.accelerators.0.accelerator_count", "1"),
resource.TestCheckResourceAttr("google_dataproc_cluster.with_auxiliary_node_groups", "cluster_config.0.auxiliary_node_groups.0.node_group_id", "node-group-id"),
testAccCheckDataprocAuxiliaryNodeGroupAccelerator(&cluster, project),
-
),
},
},
@@ -707,7 +706,7 @@ func TestAccDataprocCluster_withServiceAcc(t *testing.T) {
ExternalProviders: map[string]resource.ExternalProvider{
"time": {},
},
-CheckDestroy: testAccCheckDataprocClusterDestroy(t),
+CheckDestroy: testAccCheckDataprocClusterDestroy(t),
Steps: []resource.TestStep{
{
Config: testAccDataprocCluster_withServiceAcc(sa, rnd, subnetworkName),
@@ -827,13 +826,13 @@ func TestAccDataprocCluster_withLifecycleConfigAutoDeletion(t *testing.T) {
CheckDestroy: testAccCheckDataprocClusterDestroy(t),
Steps: []resource.TestStep{
{
-Config: testAccDataprocCluster_withLifecycleConfigAutoDeletionTime(rnd, now.Add(time.Hour * 10).Format(fmtString), subnetworkName),
+Config: testAccDataprocCluster_withLifecycleConfigAutoDeletionTime(rnd, now.Add(time.Hour*10).Format(fmtString), subnetworkName),
Check: resource.ComposeTestCheckFunc(
testAccCheckDataprocClusterExists(t, "google_dataproc_cluster.with_lifecycle_config", &cluster),
),
},
{
-Config: testAccDataprocCluster_withLifecycleConfigAutoDeletionTime(rnd, now.Add(time.Hour * 20).Format(fmtString), subnetworkName),
+Config: testAccDataprocCluster_withLifecycleConfigAutoDeletionTime(rnd, now.Add(time.Hour*20).Format(fmtString), subnetworkName),
Check: resource.ComposeTestCheckFunc(
testAccCheckDataprocClusterExists(t, "google_dataproc_cluster.with_lifecycle_config", &cluster),
),
@@ -1046,7 +1045,7 @@ func TestAccDataprocCluster_withMetastoreConfig(t *testing.T) {
updateServiceId := "tf-test-metastore-srv-update-" + acctest.RandString(t, 10)
msName_basic := fmt.Sprintf("projects/%s/locations/us-central1/services/%s", pid, basicServiceId)
msName_update := fmt.Sprintf("projects/%s/locations/us-central1/services/%s", pid, updateServiceId)

var cluster dataproc.Cluster
clusterName := "tf-test-" + acctest.RandString(t, 10)
acctest.VcrTest(t, resource.TestCase{
@@ -1058,16 +1057,14 @@ func TestAccDataprocCluster_withMetastoreConfig(t *testing.T) {
Config: testAccDataprocCluster_withMetastoreConfig(clusterName, basicServiceId),
Check: resource.ComposeTestCheckFunc(
testAccCheckDataprocClusterExists(t, "google_dataproc_cluster.with_metastore_config", &cluster),
-resource.TestCheckResourceAttr("google_dataproc_cluster.with_metastore_config", "cluster_config.0.metastore_config.0.dataproc_metastore_service",msName_basic),
-
+resource.TestCheckResourceAttr("google_dataproc_cluster.with_metastore_config", "cluster_config.0.metastore_config.0.dataproc_metastore_service", msName_basic),
),
},
{
Config: testAccDataprocCluster_withMetastoreConfig_update(clusterName, updateServiceId),
Check: resource.ComposeTestCheckFunc(
testAccCheckDataprocClusterExists(t, "google_dataproc_cluster.with_metastore_config", &cluster),
resource.TestCheckResourceAttr("google_dataproc_cluster.with_metastore_config", "cluster_config.0.metastore_config.0.dataproc_metastore_service", msName_update),
-
),
},
},
@@ -1417,6 +1414,10 @@ resource "google_dataproc_cluster" "accelerated_cluster" {
region = "us-central1"

cluster_config {
+software_config {
+  image_version = "2.0.35-debian10"
+}
+
gce_cluster_config {
subnetwork = "%s"
zone = "%s"
@@ -1652,6 +1653,9 @@ resource "google_dataproc_cluster" "basic" {
region = "us-central1"

cluster_config {
+software_config {
+  image_version = "2.0.35-debian10"
+}
gce_cluster_config {
subnetwork = "%s"
zone = "us-central1-f"
@@ -1764,6 +1768,7 @@ resource "google_dataproc_cluster" "with_init_action" {

# Keep the costs down with smallest config we can get away with
software_config {
+image_version = "2.0.35-debian10"
override_properties = {
"dataproc:dataproc.allow.zero.workers" = "true"
}
@@ -2028,6 +2033,7 @@ resource "google_dataproc_cluster" "with_bucket" {

# Keep the costs down with smallest config we can get away with
software_config {
+image_version = "2.0.35-debian10"
override_properties = {
"dataproc:dataproc.allow.zero.workers" = "true"
}
@@ -2061,6 +2067,7 @@ resource "google_dataproc_cluster" "with_bucket" {

# Keep the costs down with smallest config we can get away with
software_config {
+image_version = "2.0.35-debian10"
override_properties = {
"dataproc:dataproc.allow.zero.workers" = "true"
}
@@ -2250,6 +2257,7 @@ resource "google_dataproc_cluster" "with_service_account" {
cluster_config {
# Keep the costs down with smallest config we can get away with
software_config {
+image_version = "2.0.35-debian10"
override_properties = {
"dataproc:dataproc.allow.zero.workers" = "true"
}
@@ -862,6 +862,7 @@ resource "google_dataproc_cluster" "basic" {
cluster_config {
# Keep the costs down with smallest config we can get away with
software_config {
+image_version = "2.0.35-debian10"
override_properties = {
"dataproc:dataproc.allow.zero.workers" = "true"
}
