Merge pull request #11513 from c2thorn/sync-main-FEATURE-BRANCH-6.0.0 (#8012)

Sync main feature branch 6.0.0 - 8/21

[upstream:5041f537b74e607bc605f1fc5cb1b89f428f2a6f]

Signed-off-by: Modular Magician <[email protected]>
modular-magician authored Aug 21, 2024
1 parent e376147 commit 3e09a69
Showing 13 changed files with 307 additions and 76 deletions.
3 changes: 3 additions & 0 deletions .changelog/11513.txt
@@ -0,0 +1,3 @@
```release-note:none

```
23 changes: 23 additions & 0 deletions google-beta/acctest/resource_test_utils.go
@@ -3,17 +3,40 @@
package acctest

import (
"context"
"errors"
"fmt"
"slices"
"testing"
"time"

tfjson "github.com/hashicorp/terraform-json"
"github.com/hashicorp/terraform-plugin-testing/helper/resource"
"github.com/hashicorp/terraform-plugin-testing/plancheck"
"github.com/hashicorp/terraform-plugin-testing/terraform"
transport_tpg "github.com/hashicorp/terraform-provider-google-beta/google-beta/transport"
)

// General test utils

var _ plancheck.PlanCheck = expectNoDelete{}

// expectNoDelete is a plancheck.PlanCheck that flags any planned resource deletion.
type expectNoDelete struct{}

func (e expectNoDelete) CheckPlan(ctx context.Context, req plancheck.CheckPlanRequest, resp *plancheck.CheckPlanResponse) {
  var result error
  for _, rc := range req.Plan.ResourceChanges {
    if slices.Contains(rc.Change.Actions, tfjson.ActionDelete) {
      result = errors.Join(result, fmt.Errorf("expected no deletion of resources, but %s has planned deletion", rc.Address))
    }
  }
  resp.Error = result
}

func ExpectNoDelete() plancheck.PlanCheck {
  return expectNoDelete{}
}
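The check is consumed later in this commit by the compute global address update test; in general, it attaches to a test step's ConfigPlanChecks, as in this snippet trimmed from that test:

```go
{
  Config: testAccComputeGlobalAddress_update2(context),
  ConfigPlanChecks: resource.ConfigPlanChecks{
    PreApply: []plancheck.PlanCheck{
      acctest.ExpectNoDelete(),
    },
  },
},
```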

// TestExtractResourceAttr navigates a test's state to find the specified resource (or data source) attribute and makes the value
// accessible via the attributeValue string pointer.
func TestExtractResourceAttr(resourceName string, attributeName string, attributeValue *string) resource.TestCheckFunc {
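For context, a minimal sketch of how TestExtractResourceAttr is typically wired into a check. The test below is illustrative and not part of this change; it reuses the config helper and resource address introduced elsewhere in this commit:

```go
func TestAccComputeGlobalAddress_extractAttr(t *testing.T) {
  t.Parallel()

  context := map[string]interface{}{
    "random_suffix": acctest.RandString(t, 10),
  }

  // Populated from state by TestExtractResourceAttr so later checks can reuse it.
  var addressID string

  acctest.VcrTest(t, resource.TestCase{
    PreCheck:                 func() { acctest.AccTestPreCheck(t) },
    ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t),
    Steps: []resource.TestStep{
      {
        Config: testAccComputeGlobalAddress_update1(context),
        Check: resource.ComposeTestCheckFunc(
          // Copies the resource's "id" attribute into addressID.
          acctest.TestExtractResourceAttr("google_compute_global_address.foobar", "id", &addressID),
        ),
      },
    },
  })
}
```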
@@ -82,6 +82,8 @@ func TestAccComposerEnvironment_basic(t *testing.T) {
// Checks that all updatable fields can be updated in one apply
// (PATCH for Environments only is per-field)
func TestAccComposerEnvironment_update(t *testing.T) {
  // Currently failing
  acctest.SkipIfVcr(t)
  t.Parallel()

  envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t))
@@ -281,6 +283,8 @@ func TestAccComposerEnvironment_withDatabaseConfig(t *testing.T) {
}

func TestAccComposerEnvironment_withWebServerConfig(t *testing.T) {
  // Currently failing
  acctest.SkipIfVcr(t)
  t.Parallel()
  envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t))
  network := fmt.Sprintf("%s-%d", testComposerNetworkPrefix, acctest.RandInt(t))
@@ -8,8 +8,48 @@ import (
"testing"

"github.com/hashicorp/terraform-plugin-testing/helper/resource"
"github.com/hashicorp/terraform-plugin-testing/plancheck"
)

func TestAccComputeGlobalAddress_update(t *testing.T) {
  t.Parallel()

  context := map[string]interface{}{
    "random_suffix": acctest.RandString(t, 10),
  }

  acctest.VcrTest(t, resource.TestCase{
    PreCheck:                 func() { acctest.AccTestPreCheck(t) },
    ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t),
    CheckDestroy:             testAccCheckComputeGlobalAddressDestroyProducer(t),
    Steps: []resource.TestStep{
      {
        Config: testAccComputeGlobalAddress_update1(context),
      },
      {
        ResourceName:            "google_compute_global_address.foobar",
        ImportState:             true,
        ImportStateVerify:       true,
        ImportStateVerifyIgnore: []string{"labels", "terraform_labels"},
      },
      {
        Config: testAccComputeGlobalAddress_update2(context),
        ConfigPlanChecks: resource.ConfigPlanChecks{
          PreApply: []plancheck.PlanCheck{
            acctest.ExpectNoDelete(),
          },
        },
      },
      {
        ResourceName:            "google_compute_global_address.foobar",
        ImportState:             true,
        ImportStateVerify:       true,
        ImportStateVerifyIgnore: []string{"labels", "terraform_labels"},
      },
    },
  })
}

func TestAccComputeGlobalAddress_ipv6(t *testing.T) {
  t.Parallel()

@@ -76,3 +116,47 @@ resource "google_compute_global_address" "foobar" {
}
`, networkName, addressName)
}

func testAccComputeGlobalAddress_update1(context map[string]interface{}) string {
  return acctest.Nprintf(`
resource "google_compute_network" "foobar" {
  name = "tf-test-address-%{random_suffix}"
}
resource "google_compute_global_address" "foobar" {
  address = "172.20.181.0"
  description = "Description"
  name = "tf-test-address-%{random_suffix}"
  labels = {
    foo = "bar"
  }
  ip_version = "IPV4"
  prefix_length = 24
  address_type = "INTERNAL"
  purpose = "VPC_PEERING"
  network = google_compute_network.foobar.self_link
}
`, context)
}

func testAccComputeGlobalAddress_update2(context map[string]interface{}) string {
  return acctest.Nprintf(`
resource "google_compute_network" "foobar" {
  name = "tf-test-address-%{random_suffix}"
}
resource "google_compute_global_address" "foobar" {
  address = "172.20.181.0"
  description = "Description"
  name = "tf-test-address-%{random_suffix}"
  labels = {
    foo = "baz"
  }
  ip_version = "IPV4"
  prefix_length = 24
  address_type = "INTERNAL"
  purpose = "VPC_PEERING"
  network = google_compute_network.foobar.self_link
}
`, context)
}
@@ -311,23 +311,38 @@ func resourceDataflowFlexTemplateJobCreate(d *schema.ResourceData, meta interfac
func resourceDataflowFlexJobSetupEnv(d *schema.ResourceData, config *transport_tpg.Config) (dataflow.FlexTemplateRuntimeEnvironment, map[string]string, error) {

  updatedParameters := tpgresource.ExpandStringMap(d, "parameters")
  if err := hasIllegalParametersErr(d); err != nil {
    return dataflow.FlexTemplateRuntimeEnvironment{}, updatedParameters, err
  }

  additionalExperiments := tpgresource.ConvertStringSet(d.Get("additional_experiments").(*schema.Set))

  var autoscalingAlgorithm string
  autoscalingAlgorithm, updatedParameters = dataflowFlexJobTypeTransferVar("autoscaling_algorithm", "autoscalingAlgorithm", updatedParameters, d)

  numWorkers, err := parseInt64("num_workers", d)
  if err != nil {
    return dataflow.FlexTemplateRuntimeEnvironment{}, updatedParameters, err
  // An explicit parameters.numWorkers entry takes precedence over the top-level
  // num_workers field and is removed from the parameter map before submission.
  var numWorkers int
  if p, ok := d.GetOk("parameters.numWorkers"); ok {
    number, err := strconv.Atoi(p.(string))
    if err != nil {
      return dataflow.FlexTemplateRuntimeEnvironment{}, updatedParameters, fmt.Errorf("parameters.numWorkers must have a valid integer assigned to it, current value is %s", p.(string))
    }
    delete(updatedParameters, "numWorkers")
    numWorkers = number
  } else {
    if v, ok := d.GetOk("num_workers"); ok {
      numWorkers = v.(int)
    }
  }

  maxNumWorkers, err := parseInt64("max_workers", d)
  if err != nil {
    return dataflow.FlexTemplateRuntimeEnvironment{}, updatedParameters, err
  // Same precedence rule for parameters.maxNumWorkers versus max_workers.
  var maxNumWorkers int
  if p, ok := d.GetOk("parameters.maxNumWorkers"); ok {
    number, err := strconv.Atoi(p.(string))
    if err != nil {
      return dataflow.FlexTemplateRuntimeEnvironment{}, updatedParameters, fmt.Errorf("parameters.maxNumWorkers must have a valid integer assigned to it, current value is %s", p.(string))
    }
    delete(updatedParameters, "maxNumWorkers")
    maxNumWorkers = number
  } else {
    if v, ok := d.GetOk("max_workers"); ok {
      maxNumWorkers = v.(int)
    }
  }

  network, updatedParameters := dataflowFlexJobTypeTransferVar("network", "network", updatedParameters, d)
@@ -346,9 +361,22 @@ func resourceDataflowFlexJobSetupEnv(d *schema.ResourceData, config *transport_t

  ipConfiguration, updatedParameters := dataflowFlexJobTypeTransferVar("ip_configuration", "ipConfiguration", updatedParameters, d)

  enableStreamingEngine, err := parseBool("enable_streaming_engine", d)
  if err != nil {
    return dataflow.FlexTemplateRuntimeEnvironment{}, updatedParameters, err
  // parameters.enableStreamingEngine (a string) likewise wins over the
  // top-level enable_streaming_engine boolean field.
  var enableStreamingEngine bool
  if p, ok := d.GetOk("parameters.enableStreamingEngine"); ok {
    delete(updatedParameters, "enableStreamingEngine")
    e := strings.ToLower(p.(string))
    switch e {
    case "true":
      enableStreamingEngine = true
    case "false":
      enableStreamingEngine = false
    default:
      return dataflow.FlexTemplateRuntimeEnvironment{}, nil, fmt.Errorf("error when handling parameters.enableStreamingEngine value: expected value to be true or false but got value `%s`", e)
    }
  } else {
    if v, ok := d.GetOk("enable_streaming_engine"); ok {
      enableStreamingEngine = v.(bool)
    }
  }

  sdkContainerImage, updatedParameters := dataflowFlexJobTypeTransferVar("sdk_container_image", "sdkContainerImage", updatedParameters, d)
@@ -358,8 +386,8 @@ func resourceDataflowFlexJobSetupEnv(d *schema.ResourceData, config *transport_t
  env := dataflow.FlexTemplateRuntimeEnvironment{
    AdditionalUserLabels: tpgresource.ExpandStringMap(d, "effective_labels"),
    AutoscalingAlgorithm: autoscalingAlgorithm,
    NumWorkers: numWorkers,
    MaxWorkers: maxNumWorkers,
    NumWorkers: int64(numWorkers),
    MaxWorkers: int64(maxNumWorkers),
    Network: network,
    ServiceAccountEmail: serviceAccountEmail,
    Subnetwork: subnetwork,
@@ -812,43 +840,3 @@ func dataflowFlexJobTypeParameterOverride(ename, pname string, d *schema.Resourc
}
return nil
}

func hasIllegalParametersErr(d *schema.ResourceData) error {
  pKey := "parameters"
  errFmt := "%s must not include Dataflow options, found: %s"
  for k := range ResourceDataflowFlexTemplateJob().Schema {
    if _, notOk := d.GetOk(fmt.Sprintf("%s.%s", pKey, k)); notOk {
      return fmt.Errorf(errFmt, pKey, k)
    }
    kk := tpgresource.SnakeToPascalCase(k)
    kk = strings.ToLower(kk)
    if _, notOk := d.GetOk(fmt.Sprintf("%s.%s", pKey, kk)); notOk {
      return fmt.Errorf(errFmt, pKey, kk)
    }
  }
  return nil
}

func parseInt64(name string, d *schema.ResourceData) (int64, error) {
  v, ok := d.GetOk(name)
  if !ok {
    return 0, nil
  }
  vv, err := strconv.ParseInt(fmt.Sprint(v), 10, 64)
  if err != nil {
    return 0, fmt.Errorf("illegal value assigned to %s, got: %s", name, v)
  }
  return vv, nil
}

func parseBool(name string, d *schema.ResourceData) (bool, error) {
  v, ok := d.GetOk(name)
  if !ok {
    return false, nil
  }
  vv, err := strconv.ParseBool(fmt.Sprint(v))
  if err != nil {
    return false, fmt.Errorf("illegal value assigned to %s, got: %s", name, v)
  }
  return vv, nil
}
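The net effect of the rewritten setup logic above: a value supplied in the job's parameters map wins over the corresponding top-level field and is stripped from the map so it is not sent to the API twice. A self-contained sketch of that precedence rule in plain Go (detached from the Terraform SDK; the helper name and shape are illustrative):

```go
package main

import (
  "fmt"
  "strconv"
)

// resolveNumWorkers prefers parameters["numWorkers"] over the top-level field
// and removes the map entry so it is not forwarded twice.
func resolveNumWorkers(params map[string]string, field int) (int, error) {
  raw, ok := params["numWorkers"]
  if !ok {
    return field, nil
  }
  n, err := strconv.Atoi(raw)
  if err != nil {
    return 0, fmt.Errorf("parameters.numWorkers must be a valid integer, got %q", raw)
  }
  delete(params, "numWorkers")
  return n, nil
}

func main() {
  params := map[string]string{"numWorkers": "5"}
  n, _ := resolveNumWorkers(params, 2)
  fmt.Println(n, len(params)) // 5 0: the parameter wins and is consumed
}
```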
@@ -582,12 +582,20 @@ func TestAccDataflowFlexTemplateJob_enableStreamingEngine(t *testing.T) {
    CheckDestroy: testAccCheckDataflowJobDestroyProducer(t),
    Steps: []resource.TestStep{
      {
        Config: testAccDataflowFlexTemplateJob_enableStreamingEngine_param(job, bucket, topic),
        ExpectError: regexp.MustCompile("must not include Dataflow options"),
        Config: testAccDataflowFlexTemplateJob_enableStreamingEngine_param(job, bucket, topic),
        Check: resource.ComposeTestCheckFunc(
          // Is set
          resource.TestCheckResourceAttr("google_dataflow_flex_template_job.flex_job", "parameters.enableStreamingEngine", "true"),
          // Is not set
          resource.TestCheckNoResourceAttr("google_dataflow_flex_template_job.flex_job", "enable_streaming_engine"),
        ),
      },
      {
        Config: testAccDataflowFlexTemplateJob_enableStreamingEngine_field(job, bucket, topic),
        Check: resource.ComposeTestCheckFunc(
          // Now is unset
          resource.TestCheckNoResourceAttr("google_dataflow_flex_template_job.flex_job", "parameters.enableStreamingEngine"),
          // Now is set
          resource.TestCheckResourceAttr("google_dataflow_flex_template_job.flex_job", "enable_streaming_engine", "true"),
        ),
      },
@@ -18,8 +18,7 @@ import (

// Custom Module tests cannot be run in parallel without running into 409 Conflict responses.
// Run them as individual steps of an update test instead.
func TestAccSecurityCenterManagementFolderSecurityHealthAnalyticsCustomModule(t *testing.T) {
  t.Parallel()
func testAccSecurityCenterManagementFolderSecurityHealthAnalyticsCustomModule(t *testing.T) {

  context := map[string]interface{}{
    "org_id": envvar.GetTestOrgFromEnv(t),
@@ -16,8 +16,28 @@ import (
  transport_tpg "github.com/hashicorp/terraform-provider-google-beta/google-beta/transport"
)

func TestAccSecurityCenterManagementOrganizationEventThreatDetectionCustomModule(t *testing.T) {
  t.Parallel()
func TestAccSecurityCenterManagement(t *testing.T) {
  testCases := map[string]func(t *testing.T){
    "orgSecurity":     testAccSecurityCenterManagementOrganizationSecurityHealthAnalyticsCustomModule,
    "folderSecurity":  testAccSecurityCenterManagementFolderSecurityHealthAnalyticsCustomModule,
    "projectSecurity": testAccSecurityCenterManagementProjectSecurityHealthAnalyticsCustomModule,
    "organization":    testAccSecurityCenterManagementOrganizationEventThreatDetectionCustomModule,
  }

  for name, tc := range testCases {
    // Shadow the tc variable inside the loop so that, if t.Run has not yet
    // executed tc(t) when the loop advances, we do not hit a race condition.
    // See https://github.com/golang/go/wiki/CommonMistakes#using-goroutines-on-loop-iterator-variables
    tc := tc
    t.Run(name, func(t *testing.T) {
      tc(t)
    })
  }
}

func testAccSecurityCenterManagementOrganizationEventThreatDetectionCustomModule(t *testing.T) {
  // t.Parallel()

  context := map[string]interface{}{
    "org_id": envvar.GetTestOrgFromEnv(t),
@@ -17,8 +17,7 @@ import (

// Custom Module tests cannot be run in parallel without running into 409 Conflict responses.
// Run them as individual steps of an update test instead.
func TestAccSecurityCenterManagementProjectSecurityHealthAnalyticsCustomModule(t *testing.T) {
  t.Parallel()
func testAccSecurityCenterManagementProjectSecurityHealthAnalyticsCustomModule(t *testing.T) {

  context := map[string]interface{}{
    "random_suffix": acctest.RandString(t, 10),
@@ -18,8 +18,7 @@ import (

// Custom Module tests cannot be run in parallel without running into 409 Conflict responses.
// Run them as individual steps of an update test instead.
func TestAccSecurityCenterManagementOrganizationSecurityHealthAnalyticsCustomModule(t *testing.T) {
  t.Parallel()
func testAccSecurityCenterManagementOrganizationSecurityHealthAnalyticsCustomModule(t *testing.T) {

  context := map[string]interface{}{
    "org_id": envvar.GetTestOrgFromEnv(t),