diff --git a/bigquery/biglake/bigquery_create_object_table/main.tf b/bigquery/biglake/bigquery_create_object_table/main.tf
index eec5c5374..62aba4469 100644
--- a/bigquery/biglake/bigquery_create_object_table/main.tf
+++ b/bigquery/biglake/bigquery_create_object_table/main.tf
@@ -13,34 +13,35 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-/*
-* This sample demonstrates how to create an Object Table in BigQuery.
-* For more information please refer to:
-* https://cloud.google.com/bigquery/docs/object-table-introduction
-* https://cloud.google.com/bigquery/docs/object-tables
-*/
+/**
+ * This Terraform example creates an object table in BigQuery.
+ * For more information, see
+ * https://cloud.google.com/bigquery/docs/object-table-introduction
+ * and
+ * https://cloud.google.com/bigquery/docs/object-tables
+ */

 # [START bigquery_create_object_table]

 # This queries the provider for project information.
-data "google_project" "project" {}
+data "google_project" "main" {}

 # This creates a connection in the US region named "my-connection-id".
 # This connection is used to access the bucket.
-resource "google_bigquery_connection" "connection" {
+resource "google_bigquery_connection" "default" {
   connection_id = "my-connection-id"
   location      = "US"
   cloud_resource {}
 }

 # This grants the previous connection IAM role access to the bucket.
-resource "google_project_iam_member" "iam-permission" {
+resource "google_project_iam_member" "default" {
   role    = "roles/storage.objectViewer"
-  project = data.google_project.project.project_id
-  member  = "serviceAccount:${google_bigquery_connection.connection.cloud_resource[0].service_account_id}"
+  project = data.google_project.main.project_id
+  member  = "serviceAccount:${google_bigquery_connection.default.cloud_resource[0].service_account_id}"
 }

 # This defines a Google BigQuery dataset.
-resource "google_bigquery_dataset" "dataset" {
+resource "google_bigquery_dataset" "default" {
   dataset_id = "my_dataset_id"
 }
@@ -48,31 +49,28 @@ resource "google_bigquery_dataset" "dataset" {
 resource "random_id" "bucket_name_suffix" {
   byte_length = 8
 }
-resource "google_storage_bucket" "bucket" {
+resource "google_storage_bucket" "default" {
   name                        = "my-bucket-${random_id.bucket_name_suffix.hex}"
   location                    = "US"
   force_destroy               = true
   uniform_bucket_level_access = true
 }

-# This defines Google BigQuery Object Table with Manual metadata caching, and
-# storage defined by `source_uris`.
-resource "google_bigquery_table" "table" {
+# This defines a BigQuery object table with manual metadata caching.
+resource "google_bigquery_table" "default" {
   deletion_protection = false
   table_id            = "my-table-id"
-  dataset_id          = google_bigquery_dataset.dataset.dataset_id
+  dataset_id          = google_bigquery_dataset.default.dataset_id

   external_data_configuration {
-    connection_id = google_bigquery_connection.connection.name
+    connection_id = google_bigquery_connection.default.name
     autodetect    = false
     # REQUIRED for object tables.
     object_metadata = "SIMPLE"
-
+    # This defines the source for the prior object table.
     source_uris = [
-      "gs://${google_storage_bucket.bucket.name}/*",
+      "gs://${google_storage_bucket.default.name}/*",
     ]
-    # `MANUAL` for manual metadata refresh
-    # `AUTOMATIC` for automatic metadata refresh.
     metadata_cache_mode = "MANUAL"
   }

@@ -81,11 +79,10 @@ resource "google_bigquery_table" "table" {
   # Interval literal: https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#interval_literals
   # max_staleness = "0-0 0 10:0:0"

-  # Ensure the connection can access the bucket before table creation.
-  # Without this dependency, Terraform may try to create the table when
-  # the connection does not have the correct IAM Role resulting in failures.
+  # This ensures that the connection can access the bucket
+  # before Terraform creates a table.
   depends_on = [
-    google_project_iam_member.iam-permission
+    google_project_iam_member.default
   ]
 }
 # [END bigquery_create_object_table]
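
Not part of this change, but a minimal sketch of the same object table configured for automatic metadata refresh, which the removed `MANUAL`/`AUTOMATIC` comment and the commented-out `max_staleness` line refer to. The resource name `automatic_refresh_example`, the table ID, and the 10-hour staleness interval are placeholders for illustration; everything else reuses the resources defined in the sample above.

# Sketch only: the object table above, but with BigQuery refreshing the
# metadata cache automatically instead of on demand.
resource "google_bigquery_table" "automatic_refresh_example" {
  deletion_protection = false
  table_id            = "my-table-id-automatic" # placeholder table ID
  dataset_id          = google_bigquery_dataset.default.dataset_id

  external_data_configuration {
    connection_id = google_bigquery_connection.default.name
    autodetect    = false
    # REQUIRED for object tables.
    object_metadata = "SIMPLE"

    source_uris = [
      "gs://${google_storage_bucket.default.name}/*",
    ]

    # `AUTOMATIC` lets BigQuery refresh the metadata cache in the background.
    metadata_cache_mode = "AUTOMATIC"
  }

  # Upper bound on how stale cached metadata may be, written as an
  # interval literal; the 10-hour value is only an example.
  max_staleness = "0-0 0 10:0:0"

  depends_on = [
    google_project_iam_member.default
  ]
}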