
Commit

add remote_function_options to bigquery_routine (#9893) (hashicorp#7015)

* Add dataGovernanceType and remoteFunctionOptions to bigquery_routine

* add function-sources.zip to bigquery fixtures

* fix resource names in TestAccBigQueryRoutine

* add bigquery routine remote function example

[upstream:8365dfab7960fb8cc9aa5b169d5f9178271f58fa]

Signed-off-by: Modular Magician <[email protected]>
modular-magician authored Feb 23, 2024
1 parent a772bb4 commit d5051ff
Showing 6 changed files with 418 additions and 30 deletions.
3 changes: 3 additions & 0 deletions .changelog/9893.txt
@@ -0,0 +1,3 @@
```release-note:enhancement
bigquery: added `remote_function_options` field to `bigquery_routine` resource
```
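As a usage reference, here is a minimal configuration sketch of the new block. The resource names and the connection are illustrative, not taken from this commit; the endpoint URL mirrors the one in the schema docs below, and `definition_body` is empty because remote functions carry no SQL body:

```
# Connection providing the credentials used to call the remote service.
resource "google_bigquery_connection" "example" {
  connection_id = "example_connection"
  location      = "US"
  cloud_resource {}
}

resource "google_bigquery_routine" "remote_add" {
  dataset_id      = google_bigquery_dataset.test.dataset_id
  routine_id      = "remote_add"
  routine_type    = "SCALAR_FUNCTION"
  definition_body = "" # remote functions have no SQL body
  return_type     = "{\"typeKind\": \"INT64\"}"

  remote_function_options {
    endpoint   = "https://us-east1-my_gcf_project.cloudfunctions.net/remote_add"
    connection = google_bigquery_connection.example.name
  }
}
```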
154 changes: 154 additions & 0 deletions google-beta/services/bigquery/resource_bigquery_routine.go
@@ -150,6 +150,47 @@ imported JAVASCRIPT libraries.`,
ValidateFunc: verify.ValidateEnum([]string{"SQL", "JAVASCRIPT", "PYTHON", "JAVA", "SCALA", ""}),
Description: `The language of the routine. Possible values: ["SQL", "JAVASCRIPT", "PYTHON", "JAVA", "SCALA"]`,
},
"remote_function_options": {
Type: schema.TypeList,
Optional: true,
Description: `Remote function specific options.`,
MaxItems: 1,
Elem: &schema.Resource{
Schema: map[string]*schema.Schema{
"connection": {
Type: schema.TypeString,
Optional: true,
Description: `Fully qualified name of the user-provided connection object which holds
the authentication information to send requests to the remote service.
Format: "projects/{projectId}/locations/{locationId}/connections/{connectionId}"`,
},
"endpoint": {
Type: schema.TypeString,
Optional: true,
Description: `Endpoint of the user-provided remote service, e.g.
'https://us-east1-my_gcf_project.cloudfunctions.net/remote_add'`,
},
"max_batching_rows": {
Type: schema.TypeString,
Optional: true,
Description: `Max number of rows in each batch sent to the remote service. If absent or if 0,
BigQuery dynamically decides the number of rows in a batch.`,
},
"user_defined_context": {
Type: schema.TypeMap,
Computed: true,
Optional: true,
Description: `User-defined context as a set of key/value pairs, which will be sent as function
invocation context together with batched arguments in the requests to the remote
service. The total number of bytes of keys and values must be less than 8KB.
An object containing a list of "key": value pairs. Example:
'{ "name": "wrench", "mass": "1.3kg", "count": "3" }'.`,
Elem: &schema.Schema{Type: schema.TypeString},
},
},
},
},
"return_table_type": {
Type: schema.TypeString,
Optional: true,
@@ -357,6 +398,12 @@ func resourceBigQueryRoutineCreate(d *schema.ResourceData, meta interface{}) err
} else if v, ok := d.GetOkExists("spark_options"); !tpgresource.IsEmptyValue(reflect.ValueOf(sparkOptionsProp)) && (ok || !reflect.DeepEqual(v, sparkOptionsProp)) {
obj["sparkOptions"] = sparkOptionsProp
}
remoteFunctionOptionsProp, err := expandBigQueryRoutineRemoteFunctionOptions(d.Get("remote_function_options"), d, config)
if err != nil {
return err
} else if v, ok := d.GetOkExists("remote_function_options"); !tpgresource.IsEmptyValue(reflect.ValueOf(remoteFunctionOptionsProp)) && (ok || !reflect.DeepEqual(v, remoteFunctionOptionsProp)) {
obj["remoteFunctionOptions"] = remoteFunctionOptionsProp
}

url, err := tpgresource.ReplaceVars(d, config, "{{BigQueryBasePath}}projects/{{project}}/datasets/{{dataset_id}}/routines")
if err != nil {
@@ -493,6 +540,9 @@ func resourceBigQueryRoutineRead(d *schema.ResourceData, meta interface{}) error
if err := d.Set("spark_options", flattenBigQueryRoutineSparkOptions(res["sparkOptions"], d, config)); err != nil {
return fmt.Errorf("Error reading Routine: %s", err)
}
if err := d.Set("remote_function_options", flattenBigQueryRoutineRemoteFunctionOptions(res["remoteFunctionOptions"], d, config)); err != nil {
return fmt.Errorf("Error reading Routine: %s", err)
}

return nil
}
@@ -579,6 +629,12 @@ func resourceBigQueryRoutineUpdate(d *schema.ResourceData, meta interface{}) err
} else if v, ok := d.GetOkExists("spark_options"); !tpgresource.IsEmptyValue(reflect.ValueOf(v)) && (ok || !reflect.DeepEqual(v, sparkOptionsProp)) {
obj["sparkOptions"] = sparkOptionsProp
}
remoteFunctionOptionsProp, err := expandBigQueryRoutineRemoteFunctionOptions(d.Get("remote_function_options"), d, config)
if err != nil {
return err
} else if v, ok := d.GetOkExists("remote_function_options"); !tpgresource.IsEmptyValue(reflect.ValueOf(v)) && (ok || !reflect.DeepEqual(v, remoteFunctionOptionsProp)) {
obj["remoteFunctionOptions"] = remoteFunctionOptionsProp
}

url, err := tpgresource.ReplaceVars(d, config, "{{BigQueryBasePath}}projects/{{project}}/datasets/{{dataset_id}}/routines/{{routine_id}}")
if err != nil {
@@ -897,6 +953,41 @@ func flattenBigQueryRoutineSparkOptionsMainClass(v interface{}, d *schema.Resour
return v
}

func flattenBigQueryRoutineRemoteFunctionOptions(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} {
if v == nil {
return nil
}
original := v.(map[string]interface{})
if len(original) == 0 {
return nil
}
transformed := make(map[string]interface{})
transformed["endpoint"] =
flattenBigQueryRoutineRemoteFunctionOptionsEndpoint(original["endpoint"], d, config)
transformed["connection"] =
flattenBigQueryRoutineRemoteFunctionOptionsConnection(original["connection"], d, config)
transformed["user_defined_context"] =
flattenBigQueryRoutineRemoteFunctionOptionsUserDefinedContext(original["userDefinedContext"], d, config)
transformed["max_batching_rows"] =
flattenBigQueryRoutineRemoteFunctionOptionsMaxBatchingRows(original["maxBatchingRows"], d, config)
return []interface{}{transformed}
}
func flattenBigQueryRoutineRemoteFunctionOptionsEndpoint(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} {
return v
}

func flattenBigQueryRoutineRemoteFunctionOptionsConnection(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} {
return v
}

func flattenBigQueryRoutineRemoteFunctionOptionsUserDefinedContext(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} {
return v
}

func flattenBigQueryRoutineRemoteFunctionOptionsMaxBatchingRows(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} {
return v
}

func expandBigQueryRoutineRoutineReference(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) {

transformed := make(map[string]interface{})
@@ -1151,3 +1242,66 @@ func expandBigQueryRoutineSparkOptionsArchiveUris(v interface{}, d tpgresource.T
func expandBigQueryRoutineSparkOptionsMainClass(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) {
return v, nil
}

func expandBigQueryRoutineRemoteFunctionOptions(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) {
l := v.([]interface{})
if len(l) == 0 || l[0] == nil {
return nil, nil
}
raw := l[0]
original := raw.(map[string]interface{})
transformed := make(map[string]interface{})

transformedEndpoint, err := expandBigQueryRoutineRemoteFunctionOptionsEndpoint(original["endpoint"], d, config)
if err != nil {
return nil, err
} else if val := reflect.ValueOf(transformedEndpoint); val.IsValid() && !tpgresource.IsEmptyValue(val) {
transformed["endpoint"] = transformedEndpoint
}

transformedConnection, err := expandBigQueryRoutineRemoteFunctionOptionsConnection(original["connection"], d, config)
if err != nil {
return nil, err
} else if val := reflect.ValueOf(transformedConnection); val.IsValid() && !tpgresource.IsEmptyValue(val) {
transformed["connection"] = transformedConnection
}

transformedUserDefinedContext, err := expandBigQueryRoutineRemoteFunctionOptionsUserDefinedContext(original["user_defined_context"], d, config)
if err != nil {
return nil, err
} else if val := reflect.ValueOf(transformedUserDefinedContext); val.IsValid() && !tpgresource.IsEmptyValue(val) {
transformed["userDefinedContext"] = transformedUserDefinedContext
}

transformedMaxBatchingRows, err := expandBigQueryRoutineRemoteFunctionOptionsMaxBatchingRows(original["max_batching_rows"], d, config)
if err != nil {
return nil, err
} else if val := reflect.ValueOf(transformedMaxBatchingRows); val.IsValid() && !tpgresource.IsEmptyValue(val) {
transformed["maxBatchingRows"] = transformedMaxBatchingRows
}

return transformed, nil
}

func expandBigQueryRoutineRemoteFunctionOptionsEndpoint(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) {
return v, nil
}

func expandBigQueryRoutineRemoteFunctionOptionsConnection(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) {
return v, nil
}

func expandBigQueryRoutineRemoteFunctionOptionsUserDefinedContext(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (map[string]string, error) {
if v == nil {
return map[string]string{}, nil
}
m := make(map[string]string)
for k, val := range v.(map[string]interface{}) {
m[k] = val.(string)
}
return m, nil
}

func expandBigQueryRoutineRemoteFunctionOptionsMaxBatchingRows(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) {
return v, nil
}
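Two typing choices are visible in the expanders above: `max_batching_rows` travels as a Terraform string because the REST API encodes the int64 value as a decimal string, and `user_defined_context` is coerced to a map of strings. A drop-in variant of the `remote_function_options` block from the earlier sketch, exercising both optional fields with illustrative values:

```
  remote_function_options {
    endpoint          = "https://us-east1-my_gcf_project.cloudfunctions.net/remote_add"
    connection        = google_bigquery_connection.example.name
    max_batching_rows = "10" # string-typed int64; unset or "0" lets BigQuery pick the batch size

    # Sent with each batched request; total bytes of keys and values must stay under 8KB.
    user_defined_context = {
      "mode" = "add"
    }
  }
```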
@@ -30,7 +30,7 @@ import (
transport_tpg "github.com/hashicorp/terraform-provider-google-beta/google-beta/transport"
)

func TestAccBigQueryRoutine_bigQueryRoutineBasicExample(t *testing.T) {
func TestAccBigQueryRoutine_bigqueryRoutineBasicExample(t *testing.T) {
t.Parallel()

context := map[string]interface{}{
@@ -43,7 +43,7 @@ func TestAccBigQueryRoutine_bigQueryRoutineBasicExample(t *testing.T) {
CheckDestroy: testAccCheckBigQueryRoutineDestroyProducer(t),
Steps: []resource.TestStep{
{
Config: testAccBigQueryRoutine_bigQueryRoutineBasicExample(context),
Config: testAccBigQueryRoutine_bigqueryRoutineBasicExample(context),
},
{
ResourceName: "google_bigquery_routine.sproc",
@@ -54,7 +54,7 @@
})
}

func testAccBigQueryRoutine_bigQueryRoutineBasicExample(context map[string]interface{}) string {
func testAccBigQueryRoutine_bigqueryRoutineBasicExample(context map[string]interface{}) string {
return acctest.Nprintf(`
resource "google_bigquery_dataset" "test" {
dataset_id = "tf_test_dataset_id%{random_suffix}"
@@ -70,7 +70,7 @@ resource "google_bigquery_routine" "sproc" {
`, context)
}

func TestAccBigQueryRoutine_bigQueryRoutineJsonExample(t *testing.T) {
func TestAccBigQueryRoutine_bigqueryRoutineJsonExample(t *testing.T) {
t.Parallel()

context := map[string]interface{}{
@@ -83,7 +83,7 @@ func TestAccBigQueryRoutine_bigQueryRoutineJsonExample(t *testing.T) {
CheckDestroy: testAccCheckBigQueryRoutineDestroyProducer(t),
Steps: []resource.TestStep{
{
Config: testAccBigQueryRoutine_bigQueryRoutineJsonExample(context),
Config: testAccBigQueryRoutine_bigqueryRoutineJsonExample(context),
},
{
ResourceName: "google_bigquery_routine.sproc",
@@ -94,7 +94,7 @@ func TestAccBigQueryRoutine_bigQueryRoutineJsonExample(t *testing.T) {
})
}

func testAccBigQueryRoutine_bigQueryRoutineJsonExample(context map[string]interface{}) string {
func testAccBigQueryRoutine_bigqueryRoutineJsonExample(context map[string]interface{}) string {
return acctest.Nprintf(`
resource "google_bigquery_dataset" "test" {
dataset_id = "tf_test_dataset_id%{random_suffix}"
@@ -120,7 +120,7 @@ resource "google_bigquery_routine" "sproc" {
`, context)
}

func TestAccBigQueryRoutine_bigQueryRoutineTvfExample(t *testing.T) {
func TestAccBigQueryRoutine_bigqueryRoutineTvfExample(t *testing.T) {
t.Parallel()

context := map[string]interface{}{
@@ -133,7 +133,7 @@ func TestAccBigQueryRoutine_bigQueryRoutineTvfExample(t *testing.T) {
CheckDestroy: testAccCheckBigQueryRoutineDestroyProducer(t),
Steps: []resource.TestStep{
{
Config: testAccBigQueryRoutine_bigQueryRoutineTvfExample(context),
Config: testAccBigQueryRoutine_bigqueryRoutineTvfExample(context),
},
{
ResourceName: "google_bigquery_routine.sproc",
@@ -144,7 +144,7 @@ func TestAccBigQueryRoutine_bigQueryRoutineTvfExample(t *testing.T) {
})
}

func testAccBigQueryRoutine_bigQueryRoutineTvfExample(context map[string]interface{}) string {
func testAccBigQueryRoutine_bigqueryRoutineTvfExample(context map[string]interface{}) string {
return acctest.Nprintf(`
resource "google_bigquery_dataset" "test" {
dataset_id = "tf_test_dataset_id%{random_suffix}"
@@ -170,7 +170,7 @@ resource "google_bigquery_routine" "sproc" {
`, context)
}

func TestAccBigQueryRoutine_bigQueryRoutinePysparkExample(t *testing.T) {
func TestAccBigQueryRoutine_bigqueryRoutinePysparkExample(t *testing.T) {
t.Parallel()

context := map[string]interface{}{
@@ -183,7 +183,7 @@ func TestAccBigQueryRoutine_bigQueryRoutinePysparkExample(t *testing.T) {
CheckDestroy: testAccCheckBigQueryRoutineDestroyProducer(t),
Steps: []resource.TestStep{
{
Config: testAccBigQueryRoutine_bigQueryRoutinePysparkExample(context),
Config: testAccBigQueryRoutine_bigqueryRoutinePysparkExample(context),
},
{
ResourceName: "google_bigquery_routine.pyspark",
@@ -194,7 +194,7 @@ func TestAccBigQueryRoutine_bigQueryRoutinePysparkExample(t *testing.T) {
})
}

func testAccBigQueryRoutine_bigQueryRoutinePysparkExample(context map[string]interface{}) string {
func testAccBigQueryRoutine_bigqueryRoutinePysparkExample(context map[string]interface{}) string {
return acctest.Nprintf(`
resource "google_bigquery_dataset" "test" {
dataset_id = "tf_test_dataset_id%{random_suffix}"
@@ -240,7 +240,7 @@ resource "google_bigquery_routine" "pyspark" {
`, context)
}

func TestAccBigQueryRoutine_bigQueryRoutinePysparkMainfileExample(t *testing.T) {
func TestAccBigQueryRoutine_bigqueryRoutinePysparkMainfileExample(t *testing.T) {
t.Parallel()

context := map[string]interface{}{
@@ -253,7 +253,7 @@ func TestAccBigQueryRoutine_bigQueryRoutinePysparkMainfileExample(t *testing.T)
CheckDestroy: testAccCheckBigQueryRoutineDestroyProducer(t),
Steps: []resource.TestStep{
{
Config: testAccBigQueryRoutine_bigQueryRoutinePysparkMainfileExample(context),
Config: testAccBigQueryRoutine_bigqueryRoutinePysparkMainfileExample(context),
},
{
ResourceName: "google_bigquery_routine.pyspark_mainfile",
@@ -264,7 +264,7 @@ func TestAccBigQueryRoutine_bigQueryRoutinePysparkMainfileExample(t *testing.T)
})
}

func testAccBigQueryRoutine_bigQueryRoutinePysparkMainfileExample(context map[string]interface{}) string {
func testAccBigQueryRoutine_bigqueryRoutinePysparkMainfileExample(context map[string]interface{}) string {
return acctest.Nprintf(`
resource "google_bigquery_dataset" "test" {
dataset_id = "tf_test_dataset_id%{random_suffix}"
@@ -294,7 +294,7 @@ resource "google_bigquery_routine" "pyspark_mainfile" {
`, context)
}

func TestAccBigQueryRoutine_bigQueryRoutineSparkJarExample(t *testing.T) {
func TestAccBigQueryRoutine_bigqueryRoutineSparkJarExample(t *testing.T) {
t.Parallel()

context := map[string]interface{}{
@@ -307,7 +307,7 @@ func TestAccBigQueryRoutine_bigQueryRoutineSparkJarExample(t *testing.T) {
CheckDestroy: testAccCheckBigQueryRoutineDestroyProducer(t),
Steps: []resource.TestStep{
{
Config: testAccBigQueryRoutine_bigQueryRoutineSparkJarExample(context),
Config: testAccBigQueryRoutine_bigqueryRoutineSparkJarExample(context),
},
{
ResourceName: "google_bigquery_routine.spark_jar",
@@ -318,7 +318,7 @@ func TestAccBigQueryRoutine_bigQueryRoutineSparkJarExample(t *testing.T) {
})
}

func testAccBigQueryRoutine_bigQueryRoutineSparkJarExample(context map[string]interface{}) string {
func testAccBigQueryRoutine_bigqueryRoutineSparkJarExample(context map[string]interface{}) string {
return acctest.Nprintf(`
resource "google_bigquery_dataset" "test" {
dataset_id = "tf_test_dataset_id%{random_suffix}"