From d5051ff85324c5ad0e3b322e93dd03f6d7337bc4 Mon Sep 17 00:00:00 2001 From: The Magician Date: Fri, 23 Feb 2024 14:21:44 -0800 Subject: [PATCH] add `remote_function_options` to bigquery_routine (#9893) (#7015) * Add dataGovernanceType and remoteFunctionOptions to bigquery_routine * add function-sources.zip to biguquery fixtures * fix resource names in TestAccBigQueryRoutine * add bigquery routine remote function example [upstream:8365dfab7960fb8cc9aa5b169d5f9178271f58fa] Signed-off-by: Modular Magician --- .changelog/9893.txt | 3 + .../bigquery/resource_bigquery_routine.go | 154 ++++++++++++++++ ...esource_bigquery_routine_generated_test.go | 36 ++-- .../resource_bigquery_routine_test.go | 168 ++++++++++++++++++ .../test-fixtures/function-source.zip | Bin 0 -> 458 bytes website/docs/r/bigquery_routine.html.markdown | 87 +++++++-- 6 files changed, 418 insertions(+), 30 deletions(-) create mode 100644 .changelog/9893.txt create mode 100644 google-beta/services/bigquery/test-fixtures/function-source.zip diff --git a/.changelog/9893.txt b/.changelog/9893.txt new file mode 100644 index 0000000000..95ab623752 --- /dev/null +++ b/.changelog/9893.txt @@ -0,0 +1,3 @@ +```release-note:enhancement +bigquery: added `remote_function_options` field to `bigquery_routine` resource +``` \ No newline at end of file diff --git a/google-beta/services/bigquery/resource_bigquery_routine.go b/google-beta/services/bigquery/resource_bigquery_routine.go index d12345b7d9..29119db10d 100644 --- a/google-beta/services/bigquery/resource_bigquery_routine.go +++ b/google-beta/services/bigquery/resource_bigquery_routine.go @@ -150,6 +150,47 @@ imported JAVASCRIPT libraries.`, ValidateFunc: verify.ValidateEnum([]string{"SQL", "JAVASCRIPT", "PYTHON", "JAVA", "SCALA", ""}), Description: `The language of the routine. Possible values: ["SQL", "JAVASCRIPT", "PYTHON", "JAVA", "SCALA"]`, }, + "remote_function_options": { + Type: schema.TypeList, + Optional: true, + Description: `Remote function specific options.`, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "connection": { + Type: schema.TypeString, + Optional: true, + Description: `Fully qualified name of the user-provided connection object which holds +the authentication information to send requests to the remote service. +Format: "projects/{projectId}/locations/{locationId}/connections/{connectionId}"`, + }, + "endpoint": { + Type: schema.TypeString, + Optional: true, + Description: `Endpoint of the user-provided remote service, e.g. +'https://us-east1-my_gcf_project.cloudfunctions.net/remote_add'`, + }, + "max_batching_rows": { + Type: schema.TypeString, + Optional: true, + Description: `Max number of rows in each batch sent to the remote service. If absent or if 0, +BigQuery dynamically decides the number of rows in a batch.`, + }, + "user_defined_context": { + Type: schema.TypeMap, + Computed: true, + Optional: true, + Description: `User-defined context as a set of key/value pairs, which will be sent as function +invocation context together with batched arguments in the requests to the remote +service. The total number of bytes of keys and values must be less than 8KB. + +An object containing a list of "key": value pairs. 
Example: +'{ "name": "wrench", "mass": "1.3kg", "count": "3" }'.`, + Elem: &schema.Schema{Type: schema.TypeString}, + }, + }, + }, + }, "return_table_type": { Type: schema.TypeString, Optional: true, @@ -357,6 +398,12 @@ func resourceBigQueryRoutineCreate(d *schema.ResourceData, meta interface{}) err } else if v, ok := d.GetOkExists("spark_options"); !tpgresource.IsEmptyValue(reflect.ValueOf(sparkOptionsProp)) && (ok || !reflect.DeepEqual(v, sparkOptionsProp)) { obj["sparkOptions"] = sparkOptionsProp } + remoteFunctionOptionsProp, err := expandBigQueryRoutineRemoteFunctionOptions(d.Get("remote_function_options"), d, config) + if err != nil { + return err + } else if v, ok := d.GetOkExists("remote_function_options"); !tpgresource.IsEmptyValue(reflect.ValueOf(remoteFunctionOptionsProp)) && (ok || !reflect.DeepEqual(v, remoteFunctionOptionsProp)) { + obj["remoteFunctionOptions"] = remoteFunctionOptionsProp + } url, err := tpgresource.ReplaceVars(d, config, "{{BigQueryBasePath}}projects/{{project}}/datasets/{{dataset_id}}/routines") if err != nil { @@ -493,6 +540,9 @@ func resourceBigQueryRoutineRead(d *schema.ResourceData, meta interface{}) error if err := d.Set("spark_options", flattenBigQueryRoutineSparkOptions(res["sparkOptions"], d, config)); err != nil { return fmt.Errorf("Error reading Routine: %s", err) } + if err := d.Set("remote_function_options", flattenBigQueryRoutineRemoteFunctionOptions(res["remoteFunctionOptions"], d, config)); err != nil { + return fmt.Errorf("Error reading Routine: %s", err) + } return nil } @@ -579,6 +629,12 @@ func resourceBigQueryRoutineUpdate(d *schema.ResourceData, meta interface{}) err } else if v, ok := d.GetOkExists("spark_options"); !tpgresource.IsEmptyValue(reflect.ValueOf(v)) && (ok || !reflect.DeepEqual(v, sparkOptionsProp)) { obj["sparkOptions"] = sparkOptionsProp } + remoteFunctionOptionsProp, err := expandBigQueryRoutineRemoteFunctionOptions(d.Get("remote_function_options"), d, config) + if err != nil { + return err + } else if v, ok := d.GetOkExists("remote_function_options"); !tpgresource.IsEmptyValue(reflect.ValueOf(v)) && (ok || !reflect.DeepEqual(v, remoteFunctionOptionsProp)) { + obj["remoteFunctionOptions"] = remoteFunctionOptionsProp + } url, err := tpgresource.ReplaceVars(d, config, "{{BigQueryBasePath}}projects/{{project}}/datasets/{{dataset_id}}/routines/{{routine_id}}") if err != nil { @@ -897,6 +953,41 @@ func flattenBigQueryRoutineSparkOptionsMainClass(v interface{}, d *schema.Resour return v } +func flattenBigQueryRoutineRemoteFunctionOptions(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return nil + } + original := v.(map[string]interface{}) + if len(original) == 0 { + return nil + } + transformed := make(map[string]interface{}) + transformed["endpoint"] = + flattenBigQueryRoutineRemoteFunctionOptionsEndpoint(original["endpoint"], d, config) + transformed["connection"] = + flattenBigQueryRoutineRemoteFunctionOptionsConnection(original["connection"], d, config) + transformed["user_defined_context"] = + flattenBigQueryRoutineRemoteFunctionOptionsUserDefinedContext(original["userDefinedContext"], d, config) + transformed["max_batching_rows"] = + flattenBigQueryRoutineRemoteFunctionOptionsMaxBatchingRows(original["maxBatchingRows"], d, config) + return []interface{}{transformed} +} +func flattenBigQueryRoutineRemoteFunctionOptionsEndpoint(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func 
flattenBigQueryRoutineRemoteFunctionOptionsConnection(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenBigQueryRoutineRemoteFunctionOptionsUserDefinedContext(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenBigQueryRoutineRemoteFunctionOptionsMaxBatchingRows(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + func expandBigQueryRoutineRoutineReference(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { transformed := make(map[string]interface{}) @@ -1151,3 +1242,66 @@ func expandBigQueryRoutineSparkOptionsArchiveUris(v interface{}, d tpgresource.T func expandBigQueryRoutineSparkOptionsMainClass(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { return v, nil } + +func expandBigQueryRoutineRemoteFunctionOptions(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { + l := v.([]interface{}) + if len(l) == 0 || l[0] == nil { + return nil, nil + } + raw := l[0] + original := raw.(map[string]interface{}) + transformed := make(map[string]interface{}) + + transformedEndpoint, err := expandBigQueryRoutineRemoteFunctionOptionsEndpoint(original["endpoint"], d, config) + if err != nil { + return nil, err + } else if val := reflect.ValueOf(transformedEndpoint); val.IsValid() && !tpgresource.IsEmptyValue(val) { + transformed["endpoint"] = transformedEndpoint + } + + transformedConnection, err := expandBigQueryRoutineRemoteFunctionOptionsConnection(original["connection"], d, config) + if err != nil { + return nil, err + } else if val := reflect.ValueOf(transformedConnection); val.IsValid() && !tpgresource.IsEmptyValue(val) { + transformed["connection"] = transformedConnection + } + + transformedUserDefinedContext, err := expandBigQueryRoutineRemoteFunctionOptionsUserDefinedContext(original["user_defined_context"], d, config) + if err != nil { + return nil, err + } else if val := reflect.ValueOf(transformedUserDefinedContext); val.IsValid() && !tpgresource.IsEmptyValue(val) { + transformed["userDefinedContext"] = transformedUserDefinedContext + } + + transformedMaxBatchingRows, err := expandBigQueryRoutineRemoteFunctionOptionsMaxBatchingRows(original["max_batching_rows"], d, config) + if err != nil { + return nil, err + } else if val := reflect.ValueOf(transformedMaxBatchingRows); val.IsValid() && !tpgresource.IsEmptyValue(val) { + transformed["maxBatchingRows"] = transformedMaxBatchingRows + } + + return transformed, nil +} + +func expandBigQueryRoutineRemoteFunctionOptionsEndpoint(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { + return v, nil +} + +func expandBigQueryRoutineRemoteFunctionOptionsConnection(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { + return v, nil +} + +func expandBigQueryRoutineRemoteFunctionOptionsUserDefinedContext(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (map[string]string, error) { + if v == nil { + return map[string]string{}, nil + } + m := make(map[string]string) + for k, val := range v.(map[string]interface{}) { + m[k] = val.(string) + } + return m, nil +} + +func expandBigQueryRoutineRemoteFunctionOptionsMaxBatchingRows(v interface{}, d tpgresource.TerraformResourceData, config 
*transport_tpg.Config) (interface{}, error) { + return v, nil +} diff --git a/google-beta/services/bigquery/resource_bigquery_routine_generated_test.go b/google-beta/services/bigquery/resource_bigquery_routine_generated_test.go index 64847c8b14..b31e6ffea3 100644 --- a/google-beta/services/bigquery/resource_bigquery_routine_generated_test.go +++ b/google-beta/services/bigquery/resource_bigquery_routine_generated_test.go @@ -30,7 +30,7 @@ import ( transport_tpg "github.com/hashicorp/terraform-provider-google-beta/google-beta/transport" ) -func TestAccBigQueryRoutine_bigQueryRoutineBasicExample(t *testing.T) { +func TestAccBigQueryRoutine_bigqueryRoutineBasicExample(t *testing.T) { t.Parallel() context := map[string]interface{}{ @@ -43,7 +43,7 @@ func TestAccBigQueryRoutine_bigQueryRoutineBasicExample(t *testing.T) { CheckDestroy: testAccCheckBigQueryRoutineDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccBigQueryRoutine_bigQueryRoutineBasicExample(context), + Config: testAccBigQueryRoutine_bigqueryRoutineBasicExample(context), }, { ResourceName: "google_bigquery_routine.sproc", @@ -54,7 +54,7 @@ func TestAccBigQueryRoutine_bigQueryRoutineBasicExample(t *testing.T) { }) } -func testAccBigQueryRoutine_bigQueryRoutineBasicExample(context map[string]interface{}) string { +func testAccBigQueryRoutine_bigqueryRoutineBasicExample(context map[string]interface{}) string { return acctest.Nprintf(` resource "google_bigquery_dataset" "test" { dataset_id = "tf_test_dataset_id%{random_suffix}" @@ -70,7 +70,7 @@ resource "google_bigquery_routine" "sproc" { `, context) } -func TestAccBigQueryRoutine_bigQueryRoutineJsonExample(t *testing.T) { +func TestAccBigQueryRoutine_bigqueryRoutineJsonExample(t *testing.T) { t.Parallel() context := map[string]interface{}{ @@ -83,7 +83,7 @@ func TestAccBigQueryRoutine_bigQueryRoutineJsonExample(t *testing.T) { CheckDestroy: testAccCheckBigQueryRoutineDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccBigQueryRoutine_bigQueryRoutineJsonExample(context), + Config: testAccBigQueryRoutine_bigqueryRoutineJsonExample(context), }, { ResourceName: "google_bigquery_routine.sproc", @@ -94,7 +94,7 @@ func TestAccBigQueryRoutine_bigQueryRoutineJsonExample(t *testing.T) { }) } -func testAccBigQueryRoutine_bigQueryRoutineJsonExample(context map[string]interface{}) string { +func testAccBigQueryRoutine_bigqueryRoutineJsonExample(context map[string]interface{}) string { return acctest.Nprintf(` resource "google_bigquery_dataset" "test" { dataset_id = "tf_test_dataset_id%{random_suffix}" @@ -120,7 +120,7 @@ resource "google_bigquery_routine" "sproc" { `, context) } -func TestAccBigQueryRoutine_bigQueryRoutineTvfExample(t *testing.T) { +func TestAccBigQueryRoutine_bigqueryRoutineTvfExample(t *testing.T) { t.Parallel() context := map[string]interface{}{ @@ -133,7 +133,7 @@ func TestAccBigQueryRoutine_bigQueryRoutineTvfExample(t *testing.T) { CheckDestroy: testAccCheckBigQueryRoutineDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccBigQueryRoutine_bigQueryRoutineTvfExample(context), + Config: testAccBigQueryRoutine_bigqueryRoutineTvfExample(context), }, { ResourceName: "google_bigquery_routine.sproc", @@ -144,7 +144,7 @@ func TestAccBigQueryRoutine_bigQueryRoutineTvfExample(t *testing.T) { }) } -func testAccBigQueryRoutine_bigQueryRoutineTvfExample(context map[string]interface{}) string { +func testAccBigQueryRoutine_bigqueryRoutineTvfExample(context map[string]interface{}) string { return acctest.Nprintf(` resource 
"google_bigquery_dataset" "test" { dataset_id = "tf_test_dataset_id%{random_suffix}" @@ -170,7 +170,7 @@ resource "google_bigquery_routine" "sproc" { `, context) } -func TestAccBigQueryRoutine_bigQueryRoutinePysparkExample(t *testing.T) { +func TestAccBigQueryRoutine_bigqueryRoutinePysparkExample(t *testing.T) { t.Parallel() context := map[string]interface{}{ @@ -183,7 +183,7 @@ func TestAccBigQueryRoutine_bigQueryRoutinePysparkExample(t *testing.T) { CheckDestroy: testAccCheckBigQueryRoutineDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccBigQueryRoutine_bigQueryRoutinePysparkExample(context), + Config: testAccBigQueryRoutine_bigqueryRoutinePysparkExample(context), }, { ResourceName: "google_bigquery_routine.pyspark", @@ -194,7 +194,7 @@ func TestAccBigQueryRoutine_bigQueryRoutinePysparkExample(t *testing.T) { }) } -func testAccBigQueryRoutine_bigQueryRoutinePysparkExample(context map[string]interface{}) string { +func testAccBigQueryRoutine_bigqueryRoutinePysparkExample(context map[string]interface{}) string { return acctest.Nprintf(` resource "google_bigquery_dataset" "test" { dataset_id = "tf_test_dataset_id%{random_suffix}" @@ -240,7 +240,7 @@ resource "google_bigquery_routine" "pyspark" { `, context) } -func TestAccBigQueryRoutine_bigQueryRoutinePysparkMainfileExample(t *testing.T) { +func TestAccBigQueryRoutine_bigqueryRoutinePysparkMainfileExample(t *testing.T) { t.Parallel() context := map[string]interface{}{ @@ -253,7 +253,7 @@ func TestAccBigQueryRoutine_bigQueryRoutinePysparkMainfileExample(t *testing.T) CheckDestroy: testAccCheckBigQueryRoutineDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccBigQueryRoutine_bigQueryRoutinePysparkMainfileExample(context), + Config: testAccBigQueryRoutine_bigqueryRoutinePysparkMainfileExample(context), }, { ResourceName: "google_bigquery_routine.pyspark_mainfile", @@ -264,7 +264,7 @@ func TestAccBigQueryRoutine_bigQueryRoutinePysparkMainfileExample(t *testing.T) }) } -func testAccBigQueryRoutine_bigQueryRoutinePysparkMainfileExample(context map[string]interface{}) string { +func testAccBigQueryRoutine_bigqueryRoutinePysparkMainfileExample(context map[string]interface{}) string { return acctest.Nprintf(` resource "google_bigquery_dataset" "test" { dataset_id = "tf_test_dataset_id%{random_suffix}" @@ -294,7 +294,7 @@ resource "google_bigquery_routine" "pyspark_mainfile" { `, context) } -func TestAccBigQueryRoutine_bigQueryRoutineSparkJarExample(t *testing.T) { +func TestAccBigQueryRoutine_bigqueryRoutineSparkJarExample(t *testing.T) { t.Parallel() context := map[string]interface{}{ @@ -307,7 +307,7 @@ func TestAccBigQueryRoutine_bigQueryRoutineSparkJarExample(t *testing.T) { CheckDestroy: testAccCheckBigQueryRoutineDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccBigQueryRoutine_bigQueryRoutineSparkJarExample(context), + Config: testAccBigQueryRoutine_bigqueryRoutineSparkJarExample(context), }, { ResourceName: "google_bigquery_routine.spark_jar", @@ -318,7 +318,7 @@ func TestAccBigQueryRoutine_bigQueryRoutineSparkJarExample(t *testing.T) { }) } -func testAccBigQueryRoutine_bigQueryRoutineSparkJarExample(context map[string]interface{}) string { +func testAccBigQueryRoutine_bigqueryRoutineSparkJarExample(context map[string]interface{}) string { return acctest.Nprintf(` resource "google_bigquery_dataset" "test" { dataset_id = "tf_test_dataset_id%{random_suffix}" diff --git a/google-beta/services/bigquery/resource_bigquery_routine_test.go 
b/google-beta/services/bigquery/resource_bigquery_routine_test.go index 14019b7f31..382a285a09 100644 --- a/google-beta/services/bigquery/resource_bigquery_routine_test.go +++ b/google-beta/services/bigquery/resource_bigquery_routine_test.go @@ -175,3 +175,171 @@ resource "google_bigquery_routine" "spark_jar" { } `, context) } + +func TestAccBigQueryRoutine_bigQueryRoutineRemoteFunction(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + "zip_path": "./test-fixtures/function-source.zip", + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckBigQueryRoutineDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccBigQueryRoutine_bigQueryRoutineRemoteFunction(context), + }, + { + ResourceName: "google_bigquery_routine.remote_function_routine", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccBigQueryRoutine_bigQueryRoutineRemoteFunction_Update(context), + }, + { + ResourceName: "google_bigquery_routine.remote_function_routine", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testAccBigQueryRoutine_bigQueryRoutineRemoteFunction(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_storage_bucket" "default" { + name = "%{random_suffix}-gcf-source" + location = "US" + uniform_bucket_level_access = true +} + +resource "google_storage_bucket_object" "object" { + name = "function-source.zip" + bucket = google_storage_bucket.default.name + source = "%{zip_path}" +} + +resource "google_cloudfunctions2_function" "default" { + name = "function-v2-0" + location = "us-central1" + description = "a new function" + + build_config { + runtime = "nodejs18" + entry_point = "helloHttp" + source { + storage_source { + bucket = google_storage_bucket.default.name + object = google_storage_bucket_object.object.name + } + } + } + + service_config { + max_instance_count = 1 + available_memory = "256M" + timeout_seconds = 60 + } +} + +resource "google_bigquery_connection" "test" { + connection_id = "tf_test_connection_id%{random_suffix}" + location = "US" + cloud_resource { } +} + +resource "google_bigquery_dataset" "test" { + dataset_id = "tf_test_dataset_id%{random_suffix}" +} + +resource "google_bigquery_routine" "remote_function_routine" { + dataset_id = "${google_bigquery_dataset.test.dataset_id}" + routine_id = "tf_test_routine_id%{random_suffix}" + routine_type = "SCALAR_FUNCTION" + definition_body = "" + + return_type = "{\"typeKind\" : \"STRING\"}" + + remote_function_options { + endpoint = google_cloudfunctions2_function.default.service_config[0].uri + connection = "${google_bigquery_connection.test.name}" + max_batching_rows = "10" + user_defined_context = { + "z": "1.5", + } + } +} +`, context) +} + +func testAccBigQueryRoutine_bigQueryRoutineRemoteFunction_Update(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_storage_bucket" "default" { + name = "%{random_suffix}-gcf-source" + location = "US" + uniform_bucket_level_access = true +} + +resource "google_storage_bucket_object" "object" { + name = "function-source.zip" + bucket = google_storage_bucket.default.name + source = "%{zip_path}" +} + +resource "google_cloudfunctions2_function" "default2" { + name = "function-v2-1" + location = "us-central1" + description = "a new new function" + + build_config { + runtime = "nodejs18" 
+ entry_point = "helloHttp" + source { + storage_source { + bucket = google_storage_bucket.default.name + object = google_storage_bucket_object.object.name + } + } + } + + service_config { + max_instance_count = 1 + available_memory = "256M" + timeout_seconds = 60 + } +} + +resource "google_bigquery_connection" "test2" { + connection_id = "tf_test_connection2_id%{random_suffix}" + location = "US" + cloud_resource { } +} + +resource "google_bigquery_dataset" "test" { + dataset_id = "tf_test_dataset_id%{random_suffix}" +} + +resource "google_bigquery_routine" "remote_function_routine" { + dataset_id = "${google_bigquery_dataset.test.dataset_id}" + routine_id = "tf_test_routine_id%{random_suffix}" + routine_type = "SCALAR_FUNCTION" + definition_body = "" + + return_type = "{\"typeKind\" : \"STRING\"}" + + remote_function_options { + endpoint = google_cloudfunctions2_function.default2.service_config[0].uri + connection = "${google_bigquery_connection.test2.name}" + max_batching_rows = "5" + user_defined_context = { + "z": "1.2", + "w": "test", + } + } +} +`, context) +} diff --git a/google-beta/services/bigquery/test-fixtures/function-source.zip b/google-beta/services/bigquery/test-fixtures/function-source.zip new file mode 100644 index 0000000000000000000000000000000000000000..1cb571888ef575c261c2c42e8315daddbb653b5a GIT binary patch literal 458 zcmWIWW@Zs#U|`^2Fbc5`5s?gR?g8?)fJGP>GV@YWEA+C8U3-1GnhgZpzFQs@;p$xQ zlJUr;rRFcLUF)$oIeX<#Kvz}y@!q?(c5?5tPquQz7UjxFE@bKtma^5VeBD{q?V7DQ z^YlWIx%GQKdjC`19Mq9~-ML3zm3_;!EVr4>kHXF!oZwj68u?}gv(@5RRo5TbY+ks$ z<==AI@WjY(^?y}9%RiaB$F#`S^w0LRr8gJ?P+Yt`ex_Fd(6xR*%mc&)iOJcC>8U_B z=jVx@-ph4Vfyd$D-Q^rNx7sH!xAR<>A+pmO>_D}O5o3PC@B#&PV36>4;W@Hj!z#UpZ>yZE~?9g?g eyBMKm6Of7Q{s3=QHjpe65EcMw9Yzoj7XSctwvFxp literal 0 HcmV?d00001 diff --git a/website/docs/r/bigquery_routine.html.markdown b/website/docs/r/bigquery_routine.html.markdown index 98e99b1303..1bd75bc74b 100644 --- a/website/docs/r/bigquery_routine.html.markdown +++ b/website/docs/r/bigquery_routine.html.markdown @@ -29,11 +29,11 @@ To get more information about Routine, see: * [Routines Intro](https://cloud.google.com/bigquery/docs/reference/rest/v2/routines) -## Example Usage - Big Query Routine Basic +## Example Usage - Bigquery Routine Basic ```hcl @@ -50,11 +50,11 @@ resource "google_bigquery_routine" "sproc" { } ``` -## Example Usage - Big Query Routine Json +## Example Usage - Bigquery Routine Json ```hcl @@ -81,11 +81,11 @@ resource "google_bigquery_routine" "sproc" { } ``` -## Example Usage - Big Query Routine Tvf +## Example Usage - Bigquery Routine Tvf ```hcl @@ -112,11 +112,11 @@ resource "google_bigquery_routine" "sproc" { } ``` -## Example Usage - Big Query Routine Pyspark +## Example Usage - Bigquery Routine Pyspark ```hcl @@ -163,11 +163,11 @@ resource "google_bigquery_routine" "pyspark" { } ``` -## Example Usage - Big Query Routine Pyspark Mainfile +## Example Usage - Bigquery Routine Pyspark Mainfile ```hcl @@ -198,11 +198,11 @@ resource "google_bigquery_routine" "pyspark_mainfile" { } ``` -## Example Usage - Big Query Routine Spark Jar +## Example Usage - Bigquery Routine Spark Jar ```hcl @@ -235,6 +235,38 @@ resource "google_bigquery_routine" "spark_jar" { } } ``` +## Example Usage - Bigquery Routine Remote Function + + +```hcl +resource "google_bigquery_dataset" "test" { + dataset_id = "dataset_id" +} + +resource "google_bigquery_connection" "test" { + connection_id = "connection_id" + location = "US" + cloud_resource { } +} + +resource "google_bigquery_routine" "remote_function" { + dataset_id 
= google_bigquery_dataset.test.dataset_id + routine_id = "routine_id" + routine_type = "SCALAR_FUNCTION" + definition_body = "" + + return_type = "{\"typeKind\" : \"STRING\"}" + + remote_function_options { + endpoint = "https://us-east1-my_gcf_project.cloudfunctions.net/remote_add" + connection = google_bigquery_connection.test.name + max_batching_rows = "10" + user_defined_context = { + "z": "1.5", + } + } +} +``` ## Argument Reference @@ -311,6 +343,11 @@ The following arguments are supported: Optional. If language is one of "PYTHON", "JAVA", "SCALA", this field stores the options for spark stored procedure. Structure is [documented below](#nested_spark_options). +* `remote_function_options` - + (Optional) + Remote function specific options. + Structure is [documented below](#nested_remote_function_options). + * `project` - (Optional) The ID of the project in which the resource belongs. If it is not provided, the provider project is used. @@ -390,6 +427,32 @@ The following arguments are supported: The fully qualified name of a class in jarUris, for example, com.example.wordcount. Exactly one of mainClass and main_jar_uri field should be set for Java/Scala language type. +The `remote_function_options` block supports: + +* `endpoint` - + (Optional) + Endpoint of the user-provided remote service, e.g. + `https://us-east1-my_gcf_project.cloudfunctions.net/remote_add` + +* `connection` - + (Optional) + Fully qualified name of the user-provided connection object which holds + the authentication information to send requests to the remote service. + Format: "projects/{projectId}/locations/{locationId}/connections/{connectionId}" + +* `user_defined_context` - + (Optional) + User-defined context as a set of key/value pairs, which will be sent as function + invocation context together with batched arguments in the requests to the remote + service. The total number of bytes of keys and values must be less than 8KB. + An object containing a list of "key": value pairs. Example: + `{ "name": "wrench", "mass": "1.3kg", "count": "3" }`. + +* `max_batching_rows` - + (Optional) + Max number of rows in each batch sent to the remote service. If absent or if 0, + BigQuery dynamically decides the number of rows in a batch. + ## Attributes Reference In addition to the arguments listed above, the following computed attributes are exported:
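As a reading aid for the expander and flattener helpers added in `resource_bigquery_routine.go` above, the following standalone Go sketch illustrates the shape transformation they perform: the single-element `remote_function_options` list from Terraform configuration is renamed to the API's camelCase `remoteFunctionOptions` keys, and `user_defined_context` is coerced to `map[string]string`. This is a simplified illustration with hypothetical input values, not the provider's generated code; the real expanders additionally skip empty values via `tpgresource.IsEmptyValue` and propagate errors.

```go
package main

import (
	"encoding/json"
	"fmt"
)

// expandRemoteFunctionOptions mirrors, in simplified form, what the patch's
// expander does: unwrap the single-element block list, map snake_case config
// keys onto the API's camelCase field names, and coerce user_defined_context
// into map[string]string for the request body.
func expandRemoteFunctionOptions(v []interface{}) map[string]interface{} {
	if len(v) == 0 || v[0] == nil {
		return nil
	}
	original := v[0].(map[string]interface{})
	transformed := map[string]interface{}{
		"endpoint":        original["endpoint"],
		"connection":      original["connection"],
		"maxBatchingRows": original["max_batching_rows"], // int64 carried as a string
	}
	ctx := map[string]string{}
	for k, val := range original["user_defined_context"].(map[string]interface{}) {
		ctx[k] = val.(string)
	}
	transformed["userDefinedContext"] = ctx
	return transformed
}

func main() {
	// Hypothetical values shaped like the acceptance test configuration above.
	cfg := []interface{}{map[string]interface{}{
		"endpoint":             "https://us-east1-my_gcf_project.cloudfunctions.net/remote_add",
		"connection":           "projects/p/locations/US/connections/c",
		"max_batching_rows":    "10",
		"user_defined_context": map[string]interface{}{"z": "1.5"},
	}}
	body, _ := json.MarshalIndent(map[string]interface{}{
		"remoteFunctionOptions": expandRemoteFunctionOptions(cfg),
	}, "", "  ")
	fmt.Println(string(body))
}
```

The flatteners added in the same file perform the inverse mapping on read, which is what the `ImportStateVerify` steps in the new acceptance test exercise.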