Merge branch 'GoogleCloudPlatform:main' into new_wbi
bcreddy-gcp authored Oct 26, 2023
2 parents d8a1d5b + db87c12 commit aa88c11
Showing 10 changed files with 170 additions and 20 deletions.
8 changes: 6 additions & 2 deletions .ci/magician/github/membership.go
@@ -27,10 +27,14 @@ var (
	}

	// This is for new team members who are onboarding
	trustedContributors = []string{}
	trustedContributors = []string{
		"BBBmau",
	}

	// This is for reviewers who are "on vacation": will not receive new review assignments but will still receive re-requests for assigned PRs.
	onVacationReviewers = []string{}
	onVacationReviewers = []string{
		"hao-nan-li",
	}
)

type UserType int64
8 changes: 5 additions & 3 deletions docs/content/contribute/create-pr.md
@@ -18,14 +18,16 @@ weight: 10

1. A reviewer will automatically be assigned to your PR.
1. Creating a new pull request or pushing a new commit automatically triggers our CI pipelines and workflows. After CI starts, downstream diff generation takes about 10 minutes; VCR tests can take up to 2 hours. If you are a community contributor, some tests will only run after approval from a reviewer.
- While convenient, relying on CI to test iterative changes to PRs often adds extreme latency to reviews if there are errors in test configurations or at runtime. We **strongly** recommend you [test your changes locally before pushing]({{< ref "/develop/run-tests" >}}) even after the initial change.
1. If your assigned reviewer does not respond to changes on a pull request within two US business days, ping them on the pull request.

{{< hint info >}}
**TIP:** Speeding up review:
1. [Test your changes locally before pushing]({{< ref "/develop/run-tests" >}}) to iterate faster.
- You can push them and test in parallel as well. New CI runs will preempt old ones where possible.
1. Resolve failed [status checks](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/collaborating-on-repositories-with-code-quality-features/about-status-checks) quickly
- [Run provider tests locally]({{< ref "/develop/run-tests" >}}) to iterate faster
- Ask your reviewer for help if you get stuck.
1. [Self-review your PR]({{< ref "/contribute/review-pr" >}}) or ask someone you know to review
   - Directly ask your reviewer for help if you don't know how to proceed. If there are failed checks, they may only check in if there's no progress after a couple of days.
1. [Self-review your PR]({{< ref "/contribute/review-pr" >}}) or ask someone else familiar with Terraform to review
{{< /hint >}}


2 changes: 2 additions & 0 deletions mmv1/products/firestore/Field.yaml
@@ -20,6 +20,8 @@ immutable: false
update_verb: :PATCH
update_mask: true
create_verb: :PATCH
error_retry_predicates:
  ["transport_tpg.FirestoreField409RetryUnderlyingDataChanged"]
description: |
  Represents a single field in the database.
  Fields are grouped by their "Collection Group", which represent all collections
1 change: 1 addition & 0 deletions mmv1/third_party/terraform/provider/provider.go.erb
@@ -229,6 +229,7 @@ func DatasourceMapWithErrors() (map[string]*schema.Resource, error) {
"google_beyondcorp_app_connector": beyondcorp.DataSourceGoogleBeyondcorpAppConnector(),
"google_beyondcorp_app_gateway": beyondcorp.DataSourceGoogleBeyondcorpAppGateway(),
"google_billing_account": billing.DataSourceGoogleBillingAccount(),
"google_bigquery_dataset": bigquery.DataSourceGoogleBigqueryDataset(),
"google_bigquery_default_service_account": bigquery.DataSourceGoogleBigqueryDefaultServiceAccount(),
"google_certificate_manager_certificate_map": certificatemanager.DataSourceGoogleCertificateManagerCertificateMap(),
"google_cloudbuild_trigger": cloudbuild.DataSourceGoogleCloudBuildTrigger(),
@@ -0,0 +1,46 @@
// Copyright (c) HashiCorp, Inc.
// SPDX-License-Identifier: MPL-2.0
package bigquery

import (
	"fmt"

	"github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
	"github.com/hashicorp/terraform-provider-google/google/tpgresource"
	transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport"
)

func DataSourceGoogleBigqueryDataset() *schema.Resource {
	dsSchema := tpgresource.DatasourceSchemaFromResourceSchema(ResourceBigQueryDataset().Schema)
	tpgresource.AddRequiredFieldsToSchema(dsSchema, "dataset_id")
	tpgresource.AddOptionalFieldsToSchema(dsSchema, "project")

	return &schema.Resource{
		Read:   dataSourceGoogleBigqueryDatasetRead,
		Schema: dsSchema,
	}
}

func dataSourceGoogleBigqueryDatasetRead(d *schema.ResourceData, meta interface{}) error {
	config := meta.(*transport_tpg.Config)

	dataset_id := d.Get("dataset_id").(string)

	project, err := tpgresource.GetProject(d, config)
	if err != nil {
		return fmt.Errorf("Error fetching project: %s", err)
	}

	id := fmt.Sprintf("projects/%s/datasets/%s", project, dataset_id)
	d.SetId(id)
	err = resourceBigQueryDatasetRead(d, meta)
	if err != nil {
		return err
	}

	if d.Id() == "" {
		return fmt.Errorf("%s not found", id)
	}

	return nil
}
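
Because the data source schema is derived from the `google_bigquery_dataset` resource schema and the read is delegated to the resource's read function, only `dataset_id` is required; `project` is optional and falls back to the provider configuration. A minimal usage sketch (the dataset ID is illustrative):

```hcl
data "google_bigquery_dataset" "example" {
  # "project" is omitted here and resolved from the provider configuration.
  dataset_id = "my_dataset"
}
```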
@@ -0,0 +1,49 @@
// Copyright (c) HashiCorp, Inc.
// SPDX-License-Identifier: MPL-2.0
package bigquery_test

import (
	"testing"

	"github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource"
	"github.com/hashicorp/terraform-provider-google/google/acctest"
)

func TestAccDataSourceGoogleBigqueryDataset_basic(t *testing.T) {
	t.Parallel()

	context := map[string]interface{}{
		"random_suffix": acctest.RandString(t, 10),
	}

	acctest.VcrTest(t, resource.TestCase{
		PreCheck:                 func() { acctest.AccTestPreCheck(t) },
		ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t),
		CheckDestroy:             testAccCheckBigQueryDatasetDestroyProducer(t),
		Steps: []resource.TestStep{
			{
				Config: testAccDataSourceGoogleBigqueryDataset_basic(context),
				Check: resource.ComposeTestCheckFunc(
					acctest.CheckDataSourceStateMatchesResourceState("data.google_bigquery_dataset.bar", "google_bigquery_dataset.foo"),
				),
			},
		},
	})
}

func testAccDataSourceGoogleBigqueryDataset_basic(context map[string]interface{}) string {
	return acctest.Nprintf(`
resource "google_bigquery_dataset" "foo" {
  dataset_id                  = "tf_test_ds_%{random_suffix}"
  friendly_name               = "testing"
  description                 = "This is a test description"
  location                    = "US"
  default_table_expiration_ms = 3600000
}
data "google_bigquery_dataset" "bar" {
  dataset_id = google_bigquery_dataset.foo.dataset_id
}
`, context)
}
@@ -105,25 +105,23 @@ func TestAccDataprocCluster_basic(t *testing.T) {
	})
}

<% if version == "ga" -%>
func TestAccDataprocVirtualCluster_basic(t *testing.T) {
	t.Parallel()

	var cluster dataproc.Cluster
	rnd := acctest.RandString(t, 10)
	pid := envvar.GetTestProjectFromEnv()
	version := "3.1-dataproc-7"
	networkName := acctest.BootstrapSharedTestNetwork(t, "dataproc-cluster")
	subnetworkName := acctest.BootstrapSubnet(t, "dataproc-cluster", networkName)
	acctest.BootstrapFirewallForDataprocSharedNetwork(t, "dataproc-cluster", networkName)
	networkName := acctest.BootstrapSharedTestNetwork(t, "gke-cluster")
	subnetworkName := acctest.BootstrapSubnet(t, "gke-cluster", networkName)

	acctest.VcrTest(t, resource.TestCase{
		PreCheck:                 func() { acctest.AccTestPreCheck(t) },
		ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t),
		CheckDestroy:             testAccCheckDataprocClusterDestroy(t),
		Steps: []resource.TestStep{
			{
				Config: testAccDataprocVirtualCluster_basic(pid, rnd, subnetworkName),
				Config: testAccDataprocVirtualCluster_basic(pid, rnd, networkName, subnetworkName),
				Check: resource.ComposeTestCheckFunc(
					testAccCheckDataprocClusterExists(t, "google_dataproc_cluster.virtual_cluster", &cluster),

@@ -145,7 +143,6 @@ func TestAccDataprocVirtualCluster_basic(t *testing.T) {
		},
	})
}
<% end -%>

func TestAccDataprocCluster_withAccelerators(t *testing.T) {
	t.Parallel()
@@ -1257,8 +1254,7 @@ resource "google_dataproc_cluster" "basic" {
`, rnd)
}

<% if version == "ga" -%>
func testAccDataprocVirtualCluster_basic(projectID, rnd, subnetworkName string) string {
func testAccDataprocVirtualCluster_basic(projectID, rnd, networkName, subnetworkName string) string {
	return fmt.Sprintf(`
data "google_project" "project" {
  project_id = "%s"
@@ -1267,11 +1263,8 @@ data "google_project" "project" {
resource "google_container_cluster" "primary" {
  name     = "tf-test-gke-%s"
  location = "us-central1-a"
  cluster_config {
    gce_cluster_config {
      subnetwork = "%s"
    }
  }
  network    = "%s"
  subnetwork = "%s"

  initial_node_count = 1

@@ -1320,9 +1313,8 @@ resource "google_dataproc_cluster" "virtual_cluster" {
    }
  }
}
`, projectID, rnd, subnetworkName, projectID, rnd, rnd, rnd, rnd, rnd, rnd)
`, projectID, rnd, networkName, subnetworkName, projectID, rnd, rnd, rnd, rnd, rnd, rnd)
}
<% end -%>

func testAccCheckDataprocGkeClusterNodePoolsHaveRoles(cluster *dataproc.Cluster, roles ...string) func(s *terraform.State) error {
	return func(s *terraform.State) error {
10 changes: 10 additions & 0 deletions mmv1/third_party/terraform/transport/error_retry_predicates.go
@@ -320,6 +320,16 @@ func DatastoreIndex409Contention(err error) (bool, string) {
	return false, ""
}

// relevant for firestore in datastore mode
func FirestoreField409RetryUnderlyingDataChanged(err error) (bool, string) {
	if gerr, ok := err.(*googleapi.Error); ok {
		if gerr.Code == 409 && strings.Contains(gerr.Body, "Please retry, underlying data changed") {
			return true, "underlying data changed - retrying"
		}
	}
	return false, ""
}

func IapClient409Operation(err error) (bool, string) {
	if gerr, ok := err.(*googleapi.Error); ok {
		if gerr.Code == 409 && strings.Contains(strings.ToLower(gerr.Body), "operation was aborted") {
@@ -170,3 +170,14 @@ func TestIsSwgAutogenRouterRetryableError_notReady(t *testing.T) {
		t.Errorf("Error not detected as retryable")
	}
}

func TestFirestoreField409_retryUnderlyingDataChanged(t *testing.T) {
	err := googleapi.Error{
		Code: 409,
		Body: "Please retry, underlying data changed",
	}
	isRetryable, _ := FirestoreField409RetryUnderlyingDataChanged(&err)
	if !isRetryable {
		t.Errorf("Error not detected as retryable")
	}
}
@@ -0,0 +1,33 @@
---
subcategory: "BigQuery"
description: |-
  A data source to retrieve information about a BigQuery dataset.
---

# `google_bigquery_dataset`

Get information about a BigQuery dataset. For more information see
the [official documentation](https://cloud.google.com/bigquery/docs)
and [API](https://cloud.google.com/bigquery/docs/reference/rest/v2/datasets).

## Example Usage

```hcl
data "google_bigquery_dataset" "dataset" {
dataset_id = "my-bq-dataset"
project = "my-project"
}
```

## Argument Reference

The following arguments are supported:

* `dataset_id` - (Required) The dataset ID.

* `project` - (Optional) The ID of the project in which the resource belongs.
If it is not provided, the provider project is used.

## Attributes Reference

See the [google_bigquery_dataset](https://registry.terraform.io/providers/hashicorp/google/latest/docs/resources/bigquery_dataset) resource for details of the available attributes.
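
For example, an attribute of the underlying resource such as `location` can be read through the data source (the dataset ID below is illustrative):

```hcl
data "google_bigquery_dataset" "dataset" {
  dataset_id = "my_bq_dataset"
}

output "dataset_location" {
  value = data.google_bigquery_dataset.dataset.location
}
```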
