From 22b8ca4a0903774de6e9cffe48097119ce398fdc Mon Sep 17 00:00:00 2001
From: Shuya Ma
Date: Mon, 19 Aug 2024 00:30:09 -0700
Subject: [PATCH] Revert "Prevent Dataflow options in parameters (#11153) (#7943)"

This reverts commit 3b4aac25e933fc317f0c2d6bf0ffb8634ff51459.
---
 .changelog/11153.txt                          |  3 -
 .../resource_dataflow_flex_template_job.go    | 96 ++++++++-----------
 ...esource_dataflow_flex_template_job_test.go | 12 ++-
 3 files changed, 52 insertions(+), 59 deletions(-)
 delete mode 100644 .changelog/11153.txt

diff --git a/.changelog/11153.txt b/.changelog/11153.txt
deleted file mode 100644
index 4bc5d560bf..0000000000
--- a/.changelog/11153.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-```release-note:enhancement
-dataflow: made provider return more descriptive errors when the `parameters` field of `google_dataflow_flex_template_job` contains Dataflow options
-```
\ No newline at end of file
diff --git a/google-beta/services/dataflow/resource_dataflow_flex_template_job.go b/google-beta/services/dataflow/resource_dataflow_flex_template_job.go
index d2fc4ca097..59fa27cec8 100644
--- a/google-beta/services/dataflow/resource_dataflow_flex_template_job.go
+++ b/google-beta/services/dataflow/resource_dataflow_flex_template_job.go
@@ -311,23 +311,38 @@ func resourceDataflowFlexTemplateJobCreate(d *schema.ResourceData, meta interfac
 
 func resourceDataflowFlexJobSetupEnv(d *schema.ResourceData, config *transport_tpg.Config) (dataflow.FlexTemplateRuntimeEnvironment, map[string]string, error) {
 	updatedParameters := tpgresource.ExpandStringMap(d, "parameters")
-	if err := hasIllegalParametersErr(d); err != nil {
-		return dataflow.FlexTemplateRuntimeEnvironment{}, updatedParameters, err
-	}
 
 	additionalExperiments := tpgresource.ConvertStringSet(d.Get("additional_experiments").(*schema.Set))
 
 	var autoscalingAlgorithm string
 	autoscalingAlgorithm, updatedParameters = dataflowFlexJobTypeTransferVar("autoscaling_algorithm", "autoscalingAlgorithm", updatedParameters, d)
 
-	numWorkers, err := parseInt64("num_workers", d)
-	if err != nil {
-		return dataflow.FlexTemplateRuntimeEnvironment{}, updatedParameters, err
+	var numWorkers int
+	if p, ok := d.GetOk("parameters.numWorkers"); ok {
+		number, err := strconv.Atoi(p.(string))
+		if err != nil {
+			return dataflow.FlexTemplateRuntimeEnvironment{}, updatedParameters, fmt.Errorf("parameters.numWorkers must have a valid integer assigned to it, current value is %s", p.(string))
+		}
+		delete(updatedParameters, "numWorkers")
+		numWorkers = number
+	} else {
+		if v, ok := d.GetOk("num_workers"); ok {
+			numWorkers = v.(int)
+		}
 	}
 
-	maxNumWorkers, err := parseInt64("max_workers", d)
-	if err != nil {
-		return dataflow.FlexTemplateRuntimeEnvironment{}, updatedParameters, err
+	var maxNumWorkers int
+	if p, ok := d.GetOk("parameters.maxNumWorkers"); ok {
+		number, err := strconv.Atoi(p.(string))
+		if err != nil {
+			return dataflow.FlexTemplateRuntimeEnvironment{}, updatedParameters, fmt.Errorf("parameters.maxNumWorkers must have a valid integer assigned to it, current value is %s", p.(string))
+		}
+		delete(updatedParameters, "maxNumWorkers")
+		maxNumWorkers = number
+	} else {
+		if v, ok := d.GetOk("max_workers"); ok {
+			maxNumWorkers = v.(int)
+		}
 	}
 
 	network, updatedParameters := dataflowFlexJobTypeTransferVar("network", "network", updatedParameters, d)
@@ -346,9 +361,22 @@ func resourceDataflowFlexJobSetupEnv(d *schema.ResourceData, config *transport_t
 
 	ipConfiguration, updatedParameters := dataflowFlexJobTypeTransferVar("ip_configuration", "ipConfiguration", updatedParameters, d)
 
-	enableStreamingEngine, err := parseBool("enable_streaming_engine", d)
-	if err != nil {
-		return dataflow.FlexTemplateRuntimeEnvironment{}, updatedParameters, err
+	var enableStreamingEngine bool
+	if p, ok := d.GetOk("parameters.enableStreamingEngine"); ok {
+		delete(updatedParameters, "enableStreamingEngine")
+		e := strings.ToLower(p.(string))
+		switch e {
+		case "true":
+			enableStreamingEngine = true
+		case "false":
+			enableStreamingEngine = false
+		default:
+			return dataflow.FlexTemplateRuntimeEnvironment{}, nil, fmt.Errorf("error when handling parameters.enableStreamingEngine value: expected value to be true or false but got value `%s`", e)
+		}
+	} else {
+		if v, ok := d.GetOk("enable_streaming_engine"); ok {
+			enableStreamingEngine = v.(bool)
+		}
 	}
 
 	sdkContainerImage, updatedParameters := dataflowFlexJobTypeTransferVar("sdk_container_image", "sdkContainerImage", updatedParameters, d)
@@ -358,8 +386,8 @@ func resourceDataflowFlexJobSetupEnv(d *schema.ResourceData, config *transport_t
 	env := dataflow.FlexTemplateRuntimeEnvironment{
 		AdditionalUserLabels: tpgresource.ExpandStringMap(d, "effective_labels"),
 		AutoscalingAlgorithm: autoscalingAlgorithm,
-		NumWorkers:           numWorkers,
-		MaxWorkers:           maxNumWorkers,
+		NumWorkers:           int64(numWorkers),
+		MaxWorkers:           int64(maxNumWorkers),
 		Network:              network,
 		ServiceAccountEmail:  serviceAccountEmail,
 		Subnetwork:           subnetwork,
@@ -812,43 +840,3 @@ func dataflowFlexJobTypeParameterOverride(ename, pname string, d *schema.Resourc
 	}
 	return nil
 }
-
-func hasIllegalParametersErr(d *schema.ResourceData) error {
-	pKey := "parameters"
-	errFmt := "%s must not include Dataflow options, found: %s"
-	for k := range ResourceDataflowFlexTemplateJob().Schema {
-		if _, notOk := d.GetOk(fmt.Sprintf("%s.%s", pKey, k)); notOk {
-			return fmt.Errorf(errFmt, pKey, k)
-		}
-		kk := tpgresource.SnakeToPascalCase(k)
-		kk = strings.ToLower(kk)
-		if _, notOk := d.GetOk(fmt.Sprintf("%s.%s", pKey, kk)); notOk {
-			return fmt.Errorf(errFmt, pKey, kk)
-		}
-	}
-	return nil
-}
-
-func parseInt64(name string, d *schema.ResourceData) (int64, error) {
-	v, ok := d.GetOk(name)
-	if !ok {
-		return 0, nil
-	}
-	vv, err := strconv.ParseInt(fmt.Sprint(v), 10, 64)
-	if err != nil {
-		return 0, fmt.Errorf("illegal value assigned to %s, got: %s", name, v)
-	}
-	return vv, nil
-}
-
-func parseBool(name string, d *schema.ResourceData) (bool, error) {
-	v, ok := d.GetOk(name)
-	if !ok {
-		return false, nil
-	}
-	vv, err := strconv.ParseBool(fmt.Sprint(v))
-	if err != nil {
-		return false, fmt.Errorf("illegal value assigned to %s, got: %s", name, v)
-	}
-	return vv, nil
-}
diff --git a/google-beta/services/dataflow/resource_dataflow_flex_template_job_test.go b/google-beta/services/dataflow/resource_dataflow_flex_template_job_test.go
index 3310a86ea8..b18b98e20c 100644
--- a/google-beta/services/dataflow/resource_dataflow_flex_template_job_test.go
+++ b/google-beta/services/dataflow/resource_dataflow_flex_template_job_test.go
@@ -582,12 +582,20 @@ func TestAccDataflowFlexTemplateJob_enableStreamingEngine(t *testing.T) {
 		CheckDestroy:             testAccCheckDataflowJobDestroyProducer(t),
 		Steps: []resource.TestStep{
 			{
-				Config:      testAccDataflowFlexTemplateJob_enableStreamingEngine_param(job, bucket, topic),
-				ExpectError: regexp.MustCompile("must not include Dataflow options"),
+				Config: testAccDataflowFlexTemplateJob_enableStreamingEngine_param(job, bucket, topic),
+				Check: resource.ComposeTestCheckFunc(
+					// Is set
+					resource.TestCheckResourceAttr("google_dataflow_flex_template_job.flex_job", "parameters.enableStreamingEngine", "true"),
+					// Is not set
+					resource.TestCheckNoResourceAttr("google_dataflow_flex_template_job.flex_job", "enable_streaming_engine"),
+				),
 			},
 			{
 				Config: testAccDataflowFlexTemplateJob_enableStreamingEngine_field(job, bucket, topic),
 				Check: resource.ComposeTestCheckFunc(
+					// Now is unset
+					resource.TestCheckNoResourceAttr("google_dataflow_flex_template_job.flex_job", "parameters.enableStreamingEngine"),
+					// Now is set
					resource.TestCheckResourceAttr("google_dataflow_flex_template_job.flex_job", "enable_streaming_engine", "true"),
 				),
 			},