diff --git a/apis/bigquery/v1beta1/dataset_reference.go b/apis/bigquery/v1beta1/dataset_reference.go new file mode 100644 index 0000000000..27df856488 --- /dev/null +++ b/apis/bigquery/v1beta1/dataset_reference.go @@ -0,0 +1,187 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package v1beta1 + +import ( + "context" + "fmt" + "strings" + + refsv1beta1 "github.com/GoogleCloudPlatform/k8s-config-connector/apis/refs/v1beta1" + "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/k8s" + apierrors "k8s.io/apimachinery/pkg/api/errors" + "k8s.io/apimachinery/pkg/apis/meta/v1/unstructured" + "k8s.io/apimachinery/pkg/types" + "sigs.k8s.io/controller-runtime/pkg/client" +) + +var _ refsv1beta1.ExternalNormalizer = &BigQueryDatasetRef{} + +// BigQueryDatasetRef defines the resource reference to BigQueryDataset, whose "External" field +// holds the GCP identifier for the KRM object. +type BigQueryDatasetRef struct { + // A reference to an externally managed BigQueryDataset resource. + // Should be in the format "projects/{projectID}/locations/{location}/datasets/{datasetID}". + External string `json:"external,omitempty"` + + // The name of a BigQueryDataset resource. + Name string `json:"name,omitempty"` + + // The namespace of a BigQueryDataset resource. + Namespace string `json:"namespace,omitempty"` + + parent *BigQueryDatasetParent +} + +// NormalizedExternal provisions the "External" value for other resources that depend on BigQueryDataset. +// If the "External" is given in the other resource's spec.BigQueryDatasetRef, the given value will be used. +// Otherwise, the "Name" and "Namespace" will be used to query the actual BigQueryDataset object from the cluster. +func (r *BigQueryDatasetRef) NormalizedExternal(ctx context.Context, reader client.Reader, otherNamespace string) (string, error) { + if r.External != "" && r.Name != "" { + return "", fmt.Errorf("cannot specify both name and external on %s reference", BigQueryDatasetGVK.Kind) + } + // From given External + if r.External != "" { + if _, _, err := ParseBigQueryDatasetExternal(r.External); err != nil { + return "", err + } + return r.External, nil + } + + // From the Config Connector object + if r.Namespace == "" { + r.Namespace = otherNamespace + } + key := types.NamespacedName{Name: r.Name, Namespace: r.Namespace} + u := &unstructured.Unstructured{} + u.SetGroupVersionKind(BigQueryDatasetGVK) + if err := reader.Get(ctx, key, u); err != nil { + if apierrors.IsNotFound(err) { + return "", k8s.NewReferenceNotFoundError(u.GroupVersionKind(), key) + } + return "", fmt.Errorf("reading referenced %s %s: %w", BigQueryDatasetGVK, key, err) + } + // Get external from status.externalRef. This is the most trustworthy place.
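+ // If status.externalRef is empty, the referenced BigQueryDataset has not been reconciled yet, so the reference is reported as not ready below instead of falling back to a spec-derived identifier.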
+ actualExternalRef, _, err := unstructured.NestedString(u.Object, "status", "externalRef") + if err != nil { + return "", fmt.Errorf("reading status.externalRef: %w", err) + } + if actualExternalRef == "" { + return "", k8s.NewReferenceNotReadyError(u.GroupVersionKind(), key) + } + r.External = actualExternalRef + return r.External, nil +} + +// NewBigQueryDatasetRef builds a BigQueryDatasetRef from the Config Connector BigQueryDataset object. +func NewBigQueryDatasetRef(ctx context.Context, reader client.Reader, obj *BigQueryDataset) (*BigQueryDatasetRef, error) { + id := &BigQueryDatasetRef{} + + // Get Parent + projectRef, err := refsv1beta1.ResolveProject(ctx, reader, obj, obj.Spec.ProjectRef) + if err != nil { + return nil, err + } + projectID := projectRef.ProjectID + if projectID == "" { + return nil, fmt.Errorf("cannot resolve project") + } + location := obj.Spec.Location + id.parent = &BigQueryDatasetParent{ProjectID: projectID, Location: valueOf(location)} + + // Get desired ID + resourceID := valueOf(obj.Spec.ResourceID) + if resourceID == "" { + resourceID = obj.GetName() + } + if resourceID == "" { + return nil, fmt.Errorf("cannot resolve resource ID") + } + + // Use approved External + externalRef := valueOf(obj.Status.ExternalRef) + if externalRef == "" { + id.External = asBigQueryDatasetExternal(id.parent, resourceID) + return id, nil + } + + // Validate desired with actual + actualParent, actualResourceID, err := ParseBigQueryDatasetExternal(externalRef) + if err != nil { + return nil, err + } + if actualParent.ProjectID != projectID { + return nil, fmt.Errorf("spec.projectRef changed, expected %s, got %s", actualParent.ProjectID, projectID) + } + if actualParent.Location != valueOf(location) { + return nil, fmt.Errorf("spec.location changed, expected %s, got %s", actualParent.Location, valueOf(location)) + } + if actualResourceID != resourceID { + return nil, fmt.Errorf("cannot reset `metadata.name` or `spec.resourceID` to %s, since it has already been assigned to %s", + resourceID, actualResourceID) + } + id.External = externalRef + id.parent = &BigQueryDatasetParent{ProjectID: projectID, Location: valueOf(location)} + return id, nil +} + +func (r *BigQueryDatasetRef) Parent() (*BigQueryDatasetParent, error) { + if r.parent != nil { + return r.parent, nil + } + if r.External != "" { + parent, _, err := ParseBigQueryDatasetExternal(r.External) + if err != nil { + return nil, err + } + return parent, nil + } + return nil, fmt.Errorf("BigQueryDatasetRef not initialized from `NewBigQueryDatasetRef` or `NormalizedExternal`") +} + +type BigQueryDatasetParent struct { + ProjectID string + Location string +} + +func (p *BigQueryDatasetParent) String() string { + return "projects/" + p.ProjectID + "/locations/" + p.Location +} + +func asBigQueryDatasetExternal(parent *BigQueryDatasetParent, resourceID string) (external string) { + return parent.String() + "/datasets/" + resourceID +} + +func ParseBigQueryDatasetExternal(external string) (parent *BigQueryDatasetParent, resourceID string, err error) { + external = strings.TrimPrefix(external, "/") + tokens := strings.Split(external, "/") + if len(tokens) != 6 || tokens[0] != "projects" || tokens[2] != "locations" || tokens[4] != "datasets" { + return nil, "", fmt.Errorf("format of BigQueryDataset external=%q was not known (use projects/{projectID}/locations/{location}/datasets/{datasetID})", external) + } + parent = &BigQueryDatasetParent{ + ProjectID: tokens[1], + Location: tokens[3], + } + resourceID = tokens[5] + return parent, resourceID, nil +} + +func valueOf[T any](t *T) T { + var zeroVal T
+ if t == nil { + return zeroVal + } + return *t +} diff --git a/apis/bigquery/v1beta1/dataset_types.go b/apis/bigquery/v1beta1/dataset_types.go index 3009c1048e..5555bc2cf4 100644 --- a/apis/bigquery/v1beta1/dataset_types.go +++ b/apis/bigquery/v1beta1/dataset_types.go @@ -25,7 +25,7 @@ var BigQueryDatasetGVK = GroupVersion.WithKind("BigQueryDataset") // NOTE: json tags are required. Any new fields you add must have json tags for the fields to be serialized. // BigQueryDatasetSpec defines the desired state of BigQueryDataset -// +kcc:proto=google.cloud.bigquery.v2.dataset +// +kcc:proto=google.cloud.bigquery.v2.Dataset type BigQueryDatasetSpec struct { // The BigQueryDataset name. If not given, the metadata.name will be used. ResourceID *string `json:"resourceID,omitempty"` @@ -94,15 +94,15 @@ type BigQueryDatasetSpec struct { // The geographic location where the dataset should reside. See // https://cloud.google.com/bigquery/docs/locations for supported // locations. - Location *string `json:"location,omitempty"` + // +required + Location *string `json:"location"` // Optional. Defines the time travel window in hours. The value can be from 48 // to 168 hours (2 to 7 days). The default value is 168 hours if this is not // set. MaxTimeTravelHours *string `json:"maxTimeTravelHours,omitempty"` - // The project that this resource belongs to. - // optional. + // Optional. The project that this resource belongs to. ProjectRef *refs.ProjectRef `json:"projectRef,omitempty"` // Optional. Updates storage_billing_model for the dataset. @@ -122,6 +122,9 @@ type BigQueryDatasetStatus struct { // Output only. A hash of the resource. Etag *string `json:"etag,omitempty"` + // A unique specifier for the BigQueryDataset resource in GCP. + ExternalRef *string `json:"externalRef,omitempty"` + // Output only. The date when this dataset was last modified, in milliseconds // since the epoch. LastModifiedTime *int64 `json:"lastModifiedTime,omitempty"` diff --git a/apis/bigquery/v1beta1/types.generated.go b/apis/bigquery/v1beta1/types.generated.go index dc76ed20c1..42b796f676 100644 --- a/apis/bigquery/v1beta1/types.generated.go +++ b/apis/bigquery/v1beta1/types.generated.go @@ -14,9 +14,7 @@ package v1beta1 -import ( - refs "github.com/GoogleCloudPlatform/k8s-config-connector/apis/refs/v1beta1" -) +import refs "github.com/GoogleCloudPlatform/k8s-config-connector/apis/refs/v1beta1" // +kcc:proto=google.cloud.bigquery.v2.Access type Access struct { @@ -85,688 +83,30 @@ type Access struct { Dataset *DatasetAccessEntry `json:"dataset,omitempty"` } -// +kcc:proto=google.cloud.bigquery.v2.AggregationThresholdPolicy -type AggregationThresholdPolicy struct { - // Optional. The threshold for the "aggregation threshold" policy. - Threshold *int64 `json:"threshold,omitempty"` - - // Optional. The privacy unit column(s) associated with this policy. - // For now, only one column per data source object (table, view) is allowed as - // a privacy unit column. - // Representing as a repeated field in metadata for extensibility to - // multiple columns in future. - // Duplicates and Repeated struct fields are not allowed. - // For nested fields, use dot notation ("outer.inner") - PrivacyUnitColumns []string `json:"privacyUnitColumns,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.AvroOptions -type AvroOptions struct { - // Optional.
If sourceFormat is set to "AVRO", indicates whether to interpret - // logical types as the corresponding BigQuery data type (for example, - // TIMESTAMP), instead of using the raw type (for example, INTEGER). - UseAvroLogicalTypes *bool `json:"useAvroLogicalTypes,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.BiEngineReason -type BiEngineReason struct { - // Output only. High-level BI Engine reason for partial or disabled - // acceleration - Code *string `json:"code,omitempty"` - - // Output only. Free form human-readable reason for partial or disabled - // acceleration. - Message *string `json:"message,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.BiEngineStatistics -type BiEngineStatistics struct { - // Output only. Specifies which mode of BI Engine acceleration was performed - // (if any). - BiEngineMode *string `json:"biEngineMode,omitempty"` - - // Output only. Specifies which mode of BI Engine acceleration was performed - // (if any). - AccelerationMode *string `json:"accelerationMode,omitempty"` - - // In case of DISABLED or PARTIAL bi_engine_mode, these contain the - // explanatory reasons as to why BI Engine could not accelerate. - // In case the full query was accelerated, this field is not populated. - BiEngineReasons []BiEngineReason `json:"biEngineReasons,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.BigLakeConfiguration -type BigLakeConfiguration struct { - // Required. The connection specifying the credentials to be used to read and - // write to external storage, such as Cloud Storage. The connection_id can - // have the form `{project}.{location}.{connection_id}` or - // `projects/{project}/locations/{location}/connections/{connection_id}". - ConnectionID *string `json:"connectionID,omitempty"` - - // Required. The fully qualified location prefix of the external folder where - // table data is stored. The '*' wildcard character is not allowed. The URI - // should be in the format `gs://bucket/path_to_table/` - StorageUri *string `json:"storageUri,omitempty"` - - // Required. The file format the table data is stored in. - FileFormat *string `json:"fileFormat,omitempty"` - - // Required. The table format the metadata only snapshots are stored in. - TableFormat *string `json:"tableFormat,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.BigtableColumn -type BigtableColumn struct { - // [Required] Qualifier of the column. - // Columns in the parent column family that has this exact qualifier are - // exposed as `.` field. - // If the qualifier is valid UTF-8 string, it can be specified in the - // qualifier_string field. Otherwise, a base-64 encoded value must be set to - // qualifier_encoded. - // The column field name is the same as the column qualifier. However, if the - // qualifier is not a valid BigQuery field identifier i.e. does not match - // [a-zA-Z][a-zA-Z0-9_]*, a valid identifier must be provided as field_name. - QualifierEncoded *byte `json:"qualifierEncoded,omitempty"` - - // Qualifier string. - QualifierString *string `json:"qualifierString,omitempty"` - - // Optional. If the qualifier is not a valid BigQuery field identifier i.e. - // does not match [a-zA-Z][a-zA-Z0-9_]*, a valid identifier must be provided - // as the column field name and is used as field name in queries. - FieldName *string `json:"fieldName,omitempty"` - - // Optional. The type to convert the value in cells of this column. - // The values are expected to be encoded using HBase Bytes.toBytes function - // when using the BINARY encoding value. 
- // Following BigQuery types are allowed (case-sensitive): - // - // * BYTES - // * STRING - // * INTEGER - // * FLOAT - // * BOOLEAN - // * JSON - // - // Default type is BYTES. - // 'type' can also be set at the column family level. However, the setting at - // this level takes precedence if 'type' is set at both levels. - Type *string `json:"type,omitempty"` - - // Optional. The encoding of the values when the type is not STRING. - // Acceptable encoding values are: - // TEXT - indicates values are alphanumeric text strings. - // BINARY - indicates values are encoded using HBase Bytes.toBytes family of - // functions. - // 'encoding' can also be set at the column family level. However, the setting - // at this level takes precedence if 'encoding' is set at both levels. - Encoding *string `json:"encoding,omitempty"` - - // Optional. If this is set, only the latest version of value in this column - // are exposed. - // 'onlyReadLatest' can also be set at the column family level. However, the - // setting at this level takes precedence if 'onlyReadLatest' is set at both - // levels. - OnlyReadLatest *bool `json:"onlyReadLatest,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.BigtableColumnFamily -type BigtableColumnFamily struct { - // Identifier of the column family. - FamilyID *string `json:"familyID,omitempty"` - - // Optional. The type to convert the value in cells of this column family. - // The values are expected to be encoded using HBase Bytes.toBytes function - // when using the BINARY encoding value. - // Following BigQuery types are allowed (case-sensitive): - // - // * BYTES - // * STRING - // * INTEGER - // * FLOAT - // * BOOLEAN - // * JSON - // - // Default type is BYTES. - // This can be overridden for a specific column by listing that column in - // 'columns' and specifying a type for it. - Type *string `json:"type,omitempty"` - - // Optional. The encoding of the values when the type is not STRING. - // Acceptable encoding values are: - // TEXT - indicates values are alphanumeric text strings. - // BINARY - indicates values are encoded using HBase Bytes.toBytes family of - // functions. - // This can be overridden for a specific column by listing that column in - // 'columns' and specifying an encoding for it. - Encoding *string `json:"encoding,omitempty"` - - // Optional. Lists of columns that should be exposed as individual fields as - // opposed to a list of (column name, value) pairs. - // All columns whose qualifier matches a qualifier in this list can be - // accessed as `.`. - // Other columns can be accessed as a list through - // the `.Column` field. - Columns []BigtableColumn `json:"columns,omitempty"` - - // Optional. If this is set only the latest version of value are exposed for - // all columns in this column family. - // This can be overridden for a specific column by listing that column in - // 'columns' and specifying a different setting - // for that column. - OnlyReadLatest *bool `json:"onlyReadLatest,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.BigtableOptions -type BigtableOptions struct { - // Optional. List of column families to expose in the table schema along with - // their types. - // This list restricts the column families that can be referenced in queries - // and specifies their value types. - // You can use this list to do type conversions - see the 'type' field for - // more details. - // If you leave this list empty, all column families are present in the table - // schema and their values are read as BYTES. 
- // During a query only the column families referenced in that query are read - // from Bigtable. - ColumnFamilies []BigtableColumnFamily `json:"columnFamilies,omitempty"` - - // Optional. If field is true, then the column families that are not - // specified in columnFamilies list are not exposed in the table schema. - // Otherwise, they are read with BYTES type values. - // The default value is false. - IgnoreUnspecifiedColumnFamilies *bool `json:"ignoreUnspecifiedColumnFamilies,omitempty"` - - // Optional. If field is true, then the rowkey column families will be read - // and converted to string. Otherwise they are read with BYTES type values and - // users need to manually cast them with CAST if necessary. - // The default value is false. - ReadRowkeyAsString *bool `json:"readRowkeyAsString,omitempty"` - - // Optional. If field is true, then each column family will be read as a - // single JSON column. Otherwise they are read as a repeated cell structure - // containing timestamp/value tuples. The default value is false. - OutputColumnFamiliesAsJson *bool `json:"outputColumnFamiliesAsJson,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.CloneDefinition -type CloneDefinition struct { - // Required. Reference describing the ID of the table that was cloned. - BaseTableReference *TableReference `json:"baseTableReference,omitempty"` - - // Required. The time at which the base table was cloned. This value is - // reported in the JSON response using RFC3339 format. - CloneTime *string `json:"cloneTime,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.Clustering -type Clustering struct { - // One or more fields on which data should be clustered. Only top-level, - // non-repeated, simple-type fields are supported. The ordering of the - // clustering fields should be prioritized from most to least important - // for filtering purposes. - // - // Additional information on limitations can be found here: - // https://cloud.google.com/bigquery/docs/creating-clustered-tables#limitations - Fields []string `json:"fields,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.ColumnReference -type ColumnReference struct { - // Required. The column that composes the foreign key. - ReferencingColumn *string `json:"referencingColumn,omitempty"` - - // Required. The column in the primary key that are referenced by the - // referencing_column. - ReferencedColumn *string `json:"referencedColumn,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.ConnectionProperty -type ConnectionProperty struct { - // The key of the property to set. - Key *string `json:"key,omitempty"` - - // The value of the property to set. - Value *string `json:"value,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.CopyJobStatistics -type CopyJobStatistics struct { - // Output only. Number of rows copied to the destination table. - CopiedRows *int64 `json:"copiedRows,omitempty"` - - // Output only. Number of logical bytes copied to the destination table. - CopiedLogicalBytes *int64 `json:"copiedLogicalBytes,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.CsvOptions -type CsvOptions struct { - // Optional. The separator character for fields in a CSV file. The separator - // is interpreted as a single byte. For files encoded in ISO-8859-1, any - // single character can be used as a separator. For files encoded in UTF-8, - // characters represented in decimal range 1-127 (U+0001-U+007F) can be used - // without any modification. UTF-8 characters encoded with multiple bytes - // (i.e. 
U+0080 and above) will have only the first byte used for separating - // fields. The remaining bytes will be treated as a part of the field. - // BigQuery also supports the escape sequence "\t" (U+0009) to specify a tab - // separator. The default value is comma (",", U+002C). - FieldDelimiter *string `json:"fieldDelimiter,omitempty"` - - // Optional. The number of rows at the top of a CSV file that BigQuery will - // skip when reading the data. The default value is 0. This property is - // useful if you have header rows in the file that should be skipped. - // When autodetect is on, the behavior is the following: - // - // * skipLeadingRows unspecified - Autodetect tries to detect headers in the - // first row. If they are not detected, the row is read as data. Otherwise - // data is read starting from the second row. - // * skipLeadingRows is 0 - Instructs autodetect that there are no headers and - // data should be read starting from the first row. - // * skipLeadingRows = N > 0 - Autodetect skips N-1 rows and tries to detect - // headers in row N. If headers are not detected, row N is just skipped. - // Otherwise row N is used to extract column names for the detected schema. - SkipLeadingRows *int64 `json:"skipLeadingRows,omitempty"` - - // Optional. The value that is used to quote data sections in a CSV file. - // BigQuery converts the string to ISO-8859-1 encoding, and then uses the - // first byte of the encoded string to split the data in its raw, binary - // state. - // The default value is a float64-quote ("). - // If your data does not contain quoted sections, - // set the property value to an empty string. - // If your data contains quoted newline characters, you must also set the - // allowQuotedNewlines property to true. - // To include the specific quote character within a quoted value, precede it - // with an additional matching quote character. For example, if you want to - // escape the default character ' " ', use ' "" '. - Quote *string `json:"quote,omitempty"` - - // Optional. Indicates if BigQuery should allow quoted data sections that - // contain newline characters in a CSV file. The default value is false. - AllowQuotedNewlines *bool `json:"allowQuotedNewlines,omitempty"` - - // Optional. Indicates if BigQuery should accept rows that are missing - // trailing optional columns. If true, BigQuery treats missing trailing - // columns as null values. - // If false, records with missing trailing columns are treated as bad records, - // and if there are too many bad records, an invalid error is returned in the - // job result. The default value is false. - AllowJaggedRows *bool `json:"allowJaggedRows,omitempty"` - - // Optional. The character encoding of the data. - // The supported values are UTF-8, ISO-8859-1, UTF-16BE, UTF-16LE, UTF-32BE, - // and UTF-32LE. The default value is UTF-8. - // BigQuery decodes the data after the raw, binary data has been split using - // the values of the quote and fieldDelimiter properties. - Encoding *string `json:"encoding,omitempty"` - - // Optional. Indicates if the embedded ASCII control characters (the first 32 - // characters in the ASCII-table, from '\x00' to '\x1F') are preserved. - PreserveAsciiControlCharacters *bool `json:"preserveAsciiControlCharacters,omitempty"` - - // Optional. Specifies a string that represents a null value in a CSV file. - // For example, if you specify "\N", BigQuery interprets "\N" as a null value - // when querying a CSV file. - // The default value is the empty string. 
If you set this property to a custom - // value, BigQuery throws an error if an empty string is present for all data - // types except for STRING and BYTE. For STRING and BYTE columns, BigQuery - // interprets the empty string as an empty value. - NullMarker *string `json:"nullMarker,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.DataFormatOptions -type DataFormatOptions struct { - // Optional. Output timestamp as usec int64. Default is false. - UseInt64Timestamp *bool `json:"useInt64Timestamp,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.DataMaskingStatistics -type DataMaskingStatistics struct { - // Whether any accessed data was protected by the data masking. - DataMaskingApplied *bool `json:"dataMaskingApplied,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.DataPolicyOption -type DataPolicyOption struct { - // Data policy resource name in the form of - // projects/project_id/locations/location_id/dataPolicies/data_policy_id. - Name *string `json:"name,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.Dataset -type Dataset struct { - // Output only. The resource type. - Kind *string `json:"kind,omitempty"` - - // Output only. A hash of the resource. - Etag *string `json:"etag,omitempty"` - - // Output only. The fully-qualified unique name of the dataset in the format - // projectId:datasetId. The dataset name without the project name is given in - // the datasetId field. When creating a new dataset, leave this field blank, - // and instead specify the datasetId field. - ID *string `json:"id,omitempty"` - - // Output only. A URL that can be used to access the resource again. You can - // use this URL in Get or Update requests to the resource. - SelfLink *string `json:"selfLink,omitempty"` - - // Required. A reference that identifies the dataset. - DatasetReference *DatasetReference `json:"datasetReference,omitempty"` - - // Optional. A descriptive name for the dataset. - FriendlyName *string `json:"friendlyName,omitempty"` - - // Optional. A user-friendly description of the dataset. - Description *string `json:"description,omitempty"` - - // Optional. The default lifetime of all tables in the dataset, in - // milliseconds. The minimum lifetime value is 3600000 milliseconds (one - // hour). To clear an existing default expiration with a PATCH request, set to - // 0. Once this property is set, all newly-created tables in the dataset will - // have an expirationTime property set to the creation time plus the value in - // this property, and changing the value will only affect new tables, not - // existing ones. When the expirationTime for a given table is reached, that - // table will be deleted automatically. - // If a table's expirationTime is modified or removed before the table - // expires, or if you provide an explicit expirationTime when creating a - // table, that value takes precedence over the default expiration time - // indicated by this property. - DefaultTableExpirationMs *int64 `json:"defaultTableExpirationMs,omitempty"` - - // This default partition expiration, expressed in milliseconds. - // - // When new time-partitioned tables are created in a dataset where this - // property is set, the table will inherit this value, propagated as the - // `TimePartitioning.expirationMs` property on the new table. If you set - // `TimePartitioning.expirationMs` explicitly when creating a table, - // the `defaultPartitionExpirationMs` of the containing dataset is ignored. 
- // - // When creating a partitioned table, if `defaultPartitionExpirationMs` - // is set, the `defaultTableExpirationMs` value is ignored and the table - // will not be inherit a table expiration deadline. - DefaultPartitionExpirationMs *int64 `json:"defaultPartitionExpirationMs,omitempty"` - - // The labels associated with this dataset. You can use these - // to organize and group your datasets. - // You can set this property when inserting or updating a dataset. - // See [Creating and Updating Dataset - // Labels](https://cloud.google.com/bigquery/docs/creating-managing-labels#creating_and_updating_dataset_labels) - // for more information. - Labels map[string]string `json:"labels,omitempty"` - - // Optional. An array of objects that define dataset access for one or more - // entities. You can set this property when inserting or updating a dataset in - // order to control who is allowed to access the data. If unspecified at - // dataset creation time, BigQuery adds default dataset access for the - // following entities: access.specialGroup: projectReaders; access.role: - // READER; access.specialGroup: projectWriters; access.role: WRITER; - // access.specialGroup: projectOwners; access.role: OWNER; - // access.userByEmail: [dataset creator email]; access.role: OWNER; - // If you patch a dataset, then this field is overwritten by the patched - // dataset's access field. To add entities, you must supply the entire - // existing access array in addition to any new entities that you want to add. - Access []Access `json:"access,omitempty"` - - // Output only. The time when this dataset was created, in milliseconds since - // the epoch. - CreationTime *int64 `json:"creationTime,omitempty"` - - // Output only. The date when this dataset was last modified, in milliseconds - // since the epoch. - LastModifiedTime *int64 `json:"lastModifiedTime,omitempty"` - - // The geographic location where the dataset should reside. See - // https://cloud.google.com/bigquery/docs/locations for supported - // locations. - Location *string `json:"location,omitempty"` - - // The default encryption key for all tables in the dataset. - // After this property is set, the encryption key of all newly-created tables - // in the dataset is set to this value unless the table creation request or - // query explicitly overrides the key. - DefaultEncryptionConfiguration *EncryptionConfiguration `json:"defaultEncryptionConfiguration,omitempty"` - - // Output only. Reserved for future use. - SatisfiesPzs *bool `json:"satisfiesPzs,omitempty"` - - // Output only. Reserved for future use. - SatisfiesPzi *bool `json:"satisfiesPzi,omitempty"` - - // Output only. Same as `type` in `ListFormatDataset`. - // The type of the dataset, one of: - // - // * DEFAULT - only accessible by owner and authorized accounts, - // * PUBLIC - accessible by everyone, - // * LINKED - linked dataset, - // * EXTERNAL - dataset with definition in external metadata catalog. - Type *string `json:"type,omitempty"` - - // Optional. The source dataset reference when the dataset is of type LINKED. - // For all other dataset types it is not set. This field cannot be updated - // once it is set. Any attempt to update this field using Update and Patch API - // Operations will be ignored. - LinkedDatasetSource *LinkedDatasetSource `json:"linkedDatasetSource,omitempty"` - - // Output only. Metadata about the LinkedDataset. Filled out when the dataset - // type is LINKED. 
- LinkedDatasetMetadata *LinkedDatasetMetadata `json:"linkedDatasetMetadata,omitempty"` - - // Optional. Reference to a read-only external dataset defined in data - // catalogs outside of BigQuery. Filled out when the dataset type is EXTERNAL. - ExternalDatasetReference *ExternalDatasetReference `json:"externalDatasetReference,omitempty"` - - // Optional. Options defining open source compatible datasets living in the - // BigQuery catalog. Contains metadata of open source database, schema or - // namespace represented by the current dataset. - ExternalCatalogDatasetOptions *ExternalCatalogDatasetOptions `json:"externalCatalogDatasetOptions,omitempty"` - - // Optional. TRUE if the dataset and its table names are case-insensitive, - // otherwise FALSE. By default, this is FALSE, which means the dataset and its - // table names are case-sensitive. This field does not affect routine - // references. - IsCaseInsensitive *bool `json:"isCaseInsensitive,omitempty"` - - // Optional. Defines the default collation specification of future tables - // created in the dataset. If a table is created in this dataset without - // table-level default collation, then the table inherits the dataset default - // collation, which is applied to the string fields that do not have explicit - // collation specified. A change to this field affects only tables created - // afterwards, and does not alter the existing tables. - // The following values are supported: - // - // * 'und:ci': undetermined locale, case insensitive. - // * '': empty string. Default to case-sensitive behavior. - DefaultCollation *string `json:"defaultCollation,omitempty"` - - // Optional. Defines the default rounding mode specification of new tables - // created within this dataset. During table creation, if this field is - // specified, the table within this dataset will inherit the default rounding - // mode of the dataset. Setting the default rounding mode on a table overrides - // this option. Existing tables in the dataset are unaffected. - // If columns are defined during that table creation, - // they will immediately inherit the table's default rounding mode, - // unless otherwise specified. - DefaultRoundingMode *string `json:"defaultRoundingMode,omitempty"` - - // Optional. Defines the time travel window in hours. The value can be from 48 - // to 168 hours (2 to 7 days). The default value is 168 hours if this is not - // set. - MaxTimeTravelHours *int64 `json:"maxTimeTravelHours,omitempty"` - - // Output only. Tags for the dataset. To provide tags as inputs, use the - // `resourceTags` field. - Tags []GcpTag `json:"tags,omitempty"` - - // Optional. Updates storage_billing_model for the dataset. - StorageBillingModel *string `json:"storageBillingModel,omitempty"` - - // Optional. Output only. Restriction config for all tables and dataset. If - // set, restrict certain accesses on the dataset and all its tables based on - // the config. See [Data - // egress](https://cloud.google.com/bigquery/docs/analytics-hub-introduction#data_egress) - // for more details. - Restrictions *RestrictionConfig `json:"restrictions,omitempty"` - - // Optional. The [tags](https://cloud.google.com/bigquery/docs/tags) attached - // to this dataset. Tag keys are globally unique. Tag key is expected to be in - // the namespaced format, for example "123456789012/environment" where - // 123456789012 is the ID of the parent organization or project resource for - // this tag key. Tag value is expected to be the short name, for example - // "Production". 
See [Tag - // definitions](https://cloud.google.com/iam/docs/tags-access-control#definitions) - // for more details. - ResourceTags map[string]string `json:"resourceTags,omitempty"` -} - // +kcc:proto=google.cloud.bigquery.v2.DatasetAccessEntry type DatasetAccessEntry struct { // The dataset this entry applies to. - Dataset *DatasetReference `json:"dataset"` + // +required + Dataset *DatasetReference `json:"dataset,omitempty"` // Which resources in the dataset this entry applies to. Currently, only // views are supported, but additional target types may be added in the // future. - TargetTypes []string `json:"targetTypes"` -} - -// +kcc:proto=google.cloud.bigquery.v2.DatasetList -type DatasetList struct { - // Output only. The resource type. - // This property always returns the value "bigquery#datasetList" - Kind *string `json:"kind,omitempty"` - - // Output only. A hash value of the results page. You can use this property to - // determine if the page has changed since the last request. - Etag *string `json:"etag,omitempty"` - - // A token that can be used to request the next results page. This property is - // omitted on the final results page. - NextPageToken *string `json:"nextPageToken,omitempty"` - - // An array of the dataset resources in the project. - // Each resource contains basic information. - // For full information about a particular dataset resource, use the Datasets: - // get method. This property is omitted when there are no datasets in the - // project. - Datasets []ListFormatDataset `json:"datasets,omitempty"` - - // A list of skipped locations that were unreachable. For more information - // about BigQuery locations, see: - // https://cloud.google.com/bigquery/docs/locations. Example: "europe-west5" - Unreachable []string `json:"unreachable,omitempty"` + // +required + TargetTypes []string `json:"targetTypes,omitempty"` } // +kcc:proto=google.cloud.bigquery.v2.DatasetReference type DatasetReference struct { - // Required. A unique ID for this dataset, without the project name. The ID + // A unique Id for this dataset, without the project name. The Id // must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). // The maximum length is 1,024 characters. - DatasetId *string `json:"datasetId"` - - // Required. The ID of the project containing this dataset. - ProjectId *string `json:"projectId"` -} - -// +kcc:proto=google.cloud.bigquery.v2.DestinationTableProperties -type DestinationTableProperties struct { - // Optional. Friendly name for the destination table. If the table already - // exists, it should be same as the existing friendly name. - FriendlyName *string `json:"friendlyName,omitempty"` - - // Optional. The description for the destination table. - // This will only be used if the destination table is newly created. - // If the table already exists and a value different than the current - // description is provided, the job will fail. - Description *string `json:"description,omitempty"` - - // Optional. The labels associated with this table. You can use these to - // organize and group your tables. This will only be used if the destination - // table is newly created. If the table already exists and labels are - // different than the current labels are provided, the job will fail. - Labels map[string]string `json:"labels,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.DifferentialPrivacyPolicy -type DifferentialPrivacyPolicy struct { - // Optional. The maximum epsilon value that a query can consume. 
If the - // subscriber specifies epsilon as a parameter in a SELECT query, it must be - // less than or equal to this value. The epsilon parameter controls the amount - // of noise that is added to the groups — a higher epsilon means less noise. - MaxEpsilonPerQuery *float64 `json:"maxEpsilonPerQuery,omitempty"` - - // Optional. The delta value that is used per query. Delta represents the - // probability that any row will fail to be epsilon differentially private. - // Indicates the risk associated with exposing aggregate rows in the result of - // a query. - DeltaPerQuery *float64 `json:"deltaPerQuery,omitempty"` - - // Optional. The maximum groups contributed value that is used per query. - // Represents the maximum number of groups to which each protected entity can - // contribute. Changing this value does not improve or worsen privacy. The - // best value for accuracy and utility depends on the query and data. - MaxGroupsContributed *int64 `json:"maxGroupsContributed,omitempty"` - - // Optional. The privacy unit column associated with this policy. Differential - // privacy policies can only have one privacy unit column per data source - // object (table, view). - PrivacyUnitColumn *string `json:"privacyUnitColumn,omitempty"` - - // Optional. The total epsilon budget for all queries against the - // privacy-protected view. Each subscriber query against this view charges the - // amount of epsilon they request in their query. If there is sufficient - // budget, then the subscriber query attempts to complete. It might still fail - // due to other reasons, in which case the charge is refunded. If there is - // insufficient budget the query is rejected. There might be multiple charge - // attempts if a single query references multiple views. In this case there - // must be sufficient budget for all charges or the query is rejected and - // charges are refunded in best effort. The budget does not have a refresh - // policy and can only be updated via ALTER VIEW or circumvented by creating a - // new view that can be queried with a fresh budget. - EpsilonBudget *float64 `json:"epsilonBudget,omitempty"` - - // Optional. The total delta budget for all queries against the - // privacy-protected view. Each subscriber query against this view charges the - // amount of delta that is pre-defined by the contributor through the privacy - // policy delta_per_query field. If there is sufficient budget, then the - // subscriber query attempts to complete. It might still fail due to other - // reasons, in which case the charge is refunded. If there is insufficient - // budget the query is rejected. There might be multiple charge attempts if a - // single query references multiple views. In this case there must be - // sufficient budget for all charges or the query is rejected and charges are - // refunded in best effort. The budget does not have a refresh policy and can - // only be updated via ALTER VIEW or circumvented by creating a new view that - // can be queried with a fresh budget. - DeltaBudget *float64 `json:"deltaBudget,omitempty"` - - // Output only. The epsilon budget remaining. If budget is exhausted, no more - // queries are allowed. Note that the budget for queries that are in progress - // is deducted before the query executes. If the query fails or is cancelled - // then the budget is refunded. In this case the amount of budget remaining - // can increase. - EpsilonBudgetRemaining *float64 `json:"epsilonBudgetRemaining,omitempty"` - - // Output only. The delta budget remaining. 
If budget is exhausted, no more - // queries are allowed. Note that the budget for queries that are in progress - // is deducted before the query executes. If the query fails or is cancelled - // then the budget is refunded. In this case the amount of budget remaining - // can increase. - DeltaBudgetRemaining *float64 `json:"deltaBudgetRemaining,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.DmlStats -type DmlStats struct { - // Output only. Number of inserted Rows. Populated by DML INSERT and MERGE - // statements - InsertedRowCount *int64 `json:"insertedRowCount,omitempty"` + // +required + DatasetId *string `json:"datasetId,omitempty"` - // Output only. Number of deleted Rows. populated by DML DELETE, MERGE and - // TRUNCATE statements. - DeletedRowCount *int64 `json:"deletedRowCount,omitempty"` - - // Output only. Number of updated Rows. Populated by DML UPDATE and MERGE - // statements. - UpdatedRowCount *int64 `json:"updatedRowCount,omitempty"` + // The ID of the project containing this dataset. + // +required + ProjectId *string `json:"projectId,omitempty"` } // +kcc:proto=google.cloud.bigquery.v2.EncryptionConfiguration @@ -777,141 +117,6 @@ type EncryptionConfiguration struct { KmsKeyRef *refs.KMSCryptoKeyRef `json:"kmsKeyRef,omitempty"` } -// +kcc:proto=google.cloud.bigquery.v2.ErrorProto -type ErrorProto struct { - // A short error code that summarizes the error. - Reason *string `json:"reason,omitempty"` - - // Specifies where the error occurred, if present. - Location *string `json:"location,omitempty"` - - // Debugging information. This property is internal to Google and should not - // be used. - DebugInfo *string `json:"debugInfo,omitempty"` - - // A human-readable description of the error. - Message *string `json:"message,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.ExplainQueryStage -type ExplainQueryStage struct { - // Human-readable name for the stage. - Name *string `json:"name,omitempty"` - - // Unique ID for the stage within the plan. - ID *int64 `json:"id,omitempty"` - - // Stage start time represented as milliseconds since the epoch. - StartMs *int64 `json:"startMs,omitempty"` - - // Stage end time represented as milliseconds since the epoch. - EndMs *int64 `json:"endMs,omitempty"` - - // IDs for stages that are inputs to this stage. - InputStages []int64 `json:"inputStages,omitempty"` - - // Relative amount of time the average shard spent waiting to be - // scheduled. - WaitRatioAvg *float64 `json:"waitRatioAvg,omitempty"` - - // Milliseconds the average shard spent waiting to be scheduled. - WaitMsAvg *int64 `json:"waitMsAvg,omitempty"` - - // Relative amount of time the slowest shard spent waiting to be - // scheduled. - WaitRatioMax *float64 `json:"waitRatioMax,omitempty"` - - // Milliseconds the slowest shard spent waiting to be scheduled. - WaitMsMax *int64 `json:"waitMsMax,omitempty"` - - // Relative amount of time the average shard spent reading input. - ReadRatioAvg *float64 `json:"readRatioAvg,omitempty"` - - // Milliseconds the average shard spent reading input. - ReadMsAvg *int64 `json:"readMsAvg,omitempty"` - - // Relative amount of time the slowest shard spent reading input. - ReadRatioMax *float64 `json:"readRatioMax,omitempty"` - - // Milliseconds the slowest shard spent reading input. - ReadMsMax *int64 `json:"readMsMax,omitempty"` - - // Relative amount of time the average shard spent on CPU-bound tasks. 
- ComputeRatioAvg *float64 `json:"computeRatioAvg,omitempty"` - - // Milliseconds the average shard spent on CPU-bound tasks. - ComputeMsAvg *int64 `json:"computeMsAvg,omitempty"` - - // Relative amount of time the slowest shard spent on CPU-bound tasks. - ComputeRatioMax *float64 `json:"computeRatioMax,omitempty"` - - // Milliseconds the slowest shard spent on CPU-bound tasks. - ComputeMsMax *int64 `json:"computeMsMax,omitempty"` - - // Relative amount of time the average shard spent on writing output. - WriteRatioAvg *float64 `json:"writeRatioAvg,omitempty"` - - // Milliseconds the average shard spent on writing output. - WriteMsAvg *int64 `json:"writeMsAvg,omitempty"` - - // Relative amount of time the slowest shard spent on writing output. - WriteRatioMax *float64 `json:"writeRatioMax,omitempty"` - - // Milliseconds the slowest shard spent on writing output. - WriteMsMax *int64 `json:"writeMsMax,omitempty"` - - // Total number of bytes written to shuffle. - ShuffleOutputBytes *int64 `json:"shuffleOutputBytes,omitempty"` - - // Total number of bytes written to shuffle and spilled to disk. - ShuffleOutputBytesSpilled *int64 `json:"shuffleOutputBytesSpilled,omitempty"` - - // Number of records read into the stage. - RecordsRead *int64 `json:"recordsRead,omitempty"` - - // Number of records written by the stage. - RecordsWritten *int64 `json:"recordsWritten,omitempty"` - - // Number of parallel input segments to be processed - ParallelInputs *int64 `json:"parallelInputs,omitempty"` - - // Number of parallel input segments completed. - CompletedParallelInputs *int64 `json:"completedParallelInputs,omitempty"` - - // Current status for this stage. - Status *string `json:"status,omitempty"` - - // List of operations within the stage in dependency order (approximately - // chronological). - Steps []ExplainQueryStep `json:"steps,omitempty"` - - // Slot-milliseconds used by the stage. - SlotMs *int64 `json:"slotMs,omitempty"` - - // Output only. Compute mode for this stage. - ComputeMode *string `json:"computeMode,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.ExplainQueryStep -type ExplainQueryStep struct { - // Machine-readable operation type. - Kind *string `json:"kind,omitempty"` - - // Human-readable description of the step(s). - Substeps []string `json:"substeps,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.ExportDataStatistics -type ExportDataStatistics struct { - // Number of destination files generated in case of EXPORT DATA - // statement only. - FileCount *int64 `json:"fileCount,omitempty"` - - // [Alpha] Number of destination rows generated in case of EXPORT DATA - // statement only. - RowCount *int64 `json:"rowCount,omitempty"` -} - // +kcc:proto=google.cloud.bigquery.v2.ExternalCatalogDatasetOptions type ExternalCatalogDatasetOptions struct { // Optional. A map of key value pairs defining the parameters and properties @@ -924,240 +129,16 @@ type ExternalCatalogDatasetOptions struct { DefaultStorageLocationUri *string `json:"defaultStorageLocationUri,omitempty"` } -// +kcc:proto=google.cloud.bigquery.v2.ExternalCatalogTableOptions -type ExternalCatalogTableOptions struct { - // Optional. A map of key value pairs defining the parameters and properties - // of the open source table. Corresponds with hive meta store table - // parameters. Maximum size of 4Mib. - Parameters map[string]string `json:"parameters,omitempty"` - - // Optional. A storage descriptor containing information about the physical - // storage of this table. 
- StorageDescriptor *StorageDescriptor `json:"storageDescriptor,omitempty"` - - // Optional. The connection specifying the credentials to be used to read - // external storage, such as Azure Blob, Cloud Storage, or S3. The connection - // is needed to read the open source table from BigQuery Engine. The - // connection_id can have the form - // `..` or - // `projects//locations//connections/`. - ConnectionID *string `json:"connectionID,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.ExternalDataConfiguration -type ExternalDataConfiguration struct { - // [Required] The fully-qualified URIs that point to your data in Google - // Cloud. For Google Cloud Storage URIs: - // Each URI can contain one '*' wildcard character and it must come after - // the 'bucket' name. - // Size limits related to load jobs apply to external data sources. - // For Google Cloud Bigtable URIs: - // Exactly one URI can be specified and it has be a fully specified and - // valid HTTPS URL for a Google Cloud Bigtable table. - // For Google Cloud Datastore backups, exactly one URI can be specified. Also, - // the '*' wildcard character is not allowed. - SourceUris []string `json:"sourceUris,omitempty"` - - // Optional. Specifies how source URIs are interpreted for constructing the - // file set to load. By default source URIs are expanded against the - // underlying storage. Other options include specifying manifest files. Only - // applicable to object storage systems. - FileSetSpecType *string `json:"fileSetSpecType,omitempty"` - - // Optional. The schema for the data. - // Schema is required for CSV and JSON formats if autodetect is not on. - // Schema is disallowed for Google Cloud Bigtable, Cloud Datastore backups, - // Avro, ORC and Parquet formats. - Schema *TableSchema `json:"schema,omitempty"` - - // [Required] The data format. - // For CSV files, specify "CSV". - // For Google sheets, specify "GOOGLE_SHEETS". - // For newline-delimited JSON, specify "NEWLINE_DELIMITED_JSON". - // For Avro files, specify "AVRO". - // For Google Cloud Datastore backups, specify "DATASTORE_BACKUP". - // For Apache Iceberg tables, specify "ICEBERG". - // For ORC files, specify "ORC". - // For Parquet files, specify "PARQUET". - // [Beta] For Google Cloud Bigtable, specify "BIGTABLE". - SourceFormat *string `json:"sourceFormat,omitempty"` - - // Optional. The maximum number of bad records that BigQuery can ignore when - // reading data. If the number of bad records exceeds this value, an invalid - // error is returned in the job result. The default value is 0, which requires - // that all records are valid. This setting is ignored for Google Cloud - // Bigtable, Google Cloud Datastore backups, Avro, ORC and Parquet formats. - MaxBadRecords *int32 `json:"maxBadRecords,omitempty"` - - // Try to detect schema and format options automatically. - // Any option specified explicitly will be honored. - Autodetect *bool `json:"autodetect,omitempty"` - - // Optional. Indicates if BigQuery should allow extra values that are not - // represented in the table schema. - // If true, the extra values are ignored. - // If false, records with extra columns are treated as bad records, and if - // there are too many bad records, an invalid error is returned in the job - // result. - // The default value is false. - // The sourceFormat property determines what BigQuery treats as an extra - // value: - // CSV: Trailing columns - // JSON: Named values that don't match any column names - // Google Cloud Bigtable: This setting is ignored. 
- // Google Cloud Datastore backups: This setting is ignored. - // Avro: This setting is ignored. - // ORC: This setting is ignored. - // Parquet: This setting is ignored. - IgnoreUnknownValues *bool `json:"ignoreUnknownValues,omitempty"` - - // Optional. The compression type of the data source. - // Possible values include GZIP and NONE. The default value is NONE. - // This setting is ignored for Google Cloud Bigtable, Google Cloud Datastore - // backups, Avro, ORC and Parquet - // formats. An empty string is an invalid value. - Compression *string `json:"compression,omitempty"` - - // Optional. Additional properties to set if sourceFormat is set to CSV. - CsvOptions *CsvOptions `json:"csvOptions,omitempty"` - - // Optional. Additional properties to set if sourceFormat is set to JSON. - JsonOptions *JsonOptions `json:"jsonOptions,omitempty"` - - // Optional. Additional options if sourceFormat is set to BIGTABLE. - BigtableOptions *BigtableOptions `json:"bigtableOptions,omitempty"` - - // Optional. Additional options if sourceFormat is set to GOOGLE_SHEETS. - GoogleSheetsOptions *GoogleSheetsOptions `json:"googleSheetsOptions,omitempty"` - - // Optional. When set, configures hive partitioning support. Not all storage - // formats support hive partitioning -- requesting hive partitioning on an - // unsupported format will lead to an error, as will providing an invalid - // specification. - HivePartitioningOptions *HivePartitioningOptions `json:"hivePartitioningOptions,omitempty"` - - // Optional. The connection specifying the credentials to be used to read - // external storage, such as Azure Blob, Cloud Storage, or S3. The - // connection_id can have the form - // `{project_id}.{location_id};{connection_id}` or - // `projects/{project_id}/locations/{location_id}/connections/{connection_id}`. - ConnectionID *string `json:"connectionID,omitempty"` - - // Defines the list of possible SQL data types to which the source decimal - // values are converted. This list and the precision and the scale parameters - // of the decimal field determine the target type. In the order of NUMERIC, - // BIGNUMERIC, and STRING, a - // type is picked if it is in the specified list and if it supports the - // precision and the scale. STRING supports all precision and scale values. - // If none of the listed types supports the precision and the scale, the type - // supporting the widest range in the specified list is picked, and if a value - // exceeds the supported range when reading the data, an error will be thrown. - // - // Example: Suppose the value of this field is ["NUMERIC", "BIGNUMERIC"]. - // If (precision,scale) is: - // - // * (38,9) -> NUMERIC; - // * (39,9) -> BIGNUMERIC (NUMERIC cannot hold 30 integer digits); - // * (38,10) -> BIGNUMERIC (NUMERIC cannot hold 10 fractional digits); - // * (76,38) -> BIGNUMERIC; - // * (77,38) -> BIGNUMERIC (error if value exeeds supported range). - // - // This field cannot contain duplicate types. The order of the types in this - // field is ignored. For example, ["BIGNUMERIC", "NUMERIC"] is the same as - // ["NUMERIC", "BIGNUMERIC"] and NUMERIC always takes precedence over - // BIGNUMERIC. - // - // Defaults to ["NUMERIC", "STRING"] for ORC and ["NUMERIC"] for the other - // file formats. - DecimalTargetTypes []string `json:"decimalTargetTypes,omitempty"` - - // Optional. Additional properties to set if sourceFormat is set to AVRO. - AvroOptions *AvroOptions `json:"avroOptions,omitempty"` - - // Optional. 
Load option to be used together with source_format - // newline-delimited JSON to indicate that a variant of JSON is being loaded. - // To load newline-delimited GeoJSON, specify GEOJSON (and source_format must - // be set to NEWLINE_DELIMITED_JSON). - JsonExtension *string `json:"jsonExtension,omitempty"` - - // Optional. Additional properties to set if sourceFormat is set to PARQUET. - ParquetOptions *ParquetOptions `json:"parquetOptions,omitempty"` - - // Optional. ObjectMetadata is used to create Object Tables. Object Tables - // contain a listing of objects (with their metadata) found at the - // source_uris. If ObjectMetadata is set, source_format should be omitted. - // - // Currently SIMPLE is the only supported Object Metadata type. - ObjectMetadata *string `json:"objectMetadata,omitempty"` - - // Optional. When creating an external table, the user can provide a reference - // file with the table schema. This is enabled for the following formats: - // AVRO, PARQUET, ORC. - ReferenceFileSchemaUri *string `json:"referenceFileSchemaUri,omitempty"` - - // Optional. Metadata Cache Mode for the table. Set this to enable caching of - // metadata from external data source. - MetadataCacheMode *string `json:"metadataCacheMode,omitempty"` -} - // +kcc:proto=google.cloud.bigquery.v2.ExternalDatasetReference type ExternalDatasetReference struct { - // Required. External source that backs this dataset. - ExternalSource *string `json:"externalSource"` + // +required. External source that backs this dataset. + ExternalSource *string `json:"externalSource,omitempty"` - // Required. The connection id that is used to access the external_source. + // +required. The connection id that is used to access the external_source. // // Format: // projects/{project_id}/locations/{location_id}/connections/{connection_id} - Connection *string `json:"connection"` -} - -// +kcc:proto=google.cloud.bigquery.v2.ExternalServiceCost -type ExternalServiceCost struct { - // External service name. - ExternalService *string `json:"externalService,omitempty"` - - // External service cost in terms of bigquery bytes processed. - BytesProcessed *int64 `json:"bytesProcessed,omitempty"` - - // External service cost in terms of bigquery bytes billed. - BytesBilled *int64 `json:"bytesBilled,omitempty"` - - // External service cost in terms of bigquery slot milliseconds. - SlotMs *int64 `json:"slotMs,omitempty"` - - // Non-preemptable reserved slots used for external job. - // For example, reserved slots for Cloua AI Platform job are the VM usages - // converted to BigQuery slot with equivalent mount of price. - ReservedSlotCount *int64 `json:"reservedSlotCount,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.ForeignKey -type ForeignKey struct { - // Optional. Set only if the foreign key constraint is named. - Name *string `json:"name,omitempty"` - - // Required. The table that holds the primary key and is referenced by this - // foreign key. - ReferencedTable *TableReference `json:"referencedTable,omitempty"` - - // Required. The columns that compose the foreign key. - ColumnReferences []ColumnReference `json:"columnReferences,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.ForeignTypeInfo -type ForeignTypeInfo struct { - // Required. Specifies the system which defines the foreign data type. - TypeSystem *string `json:"typeSystem,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.ForeignViewDefinition -type ForeignViewDefinition struct { - // Required. The query that defines the view. 
- Query *string `json:"query,omitempty"` - - // Optional. Represents the dialect of the query. - Dialect *string `json:"dialect,omitempty"` + Connection *string `json:"connection,omitempty"` } // +kcc:proto=google.cloud.bigquery.v2.GcpTag @@ -1170,4038 +151,58 @@ type GcpTag struct { TagValue *string `json:"tagValue,omitempty"` } -// +kcc:proto=google.cloud.bigquery.v2.GoogleSheetsOptions -type GoogleSheetsOptions struct { - // Optional. The number of rows at the top of a sheet that BigQuery will skip - // when reading the data. The default value is 0. This property is useful if - // you have header rows that should be skipped. When autodetect is on, - // the behavior is the following: - // * skipLeadingRows unspecified - Autodetect tries to detect headers in the - // first row. If they are not detected, the row is read as data. Otherwise - // data is read starting from the second row. - // * skipLeadingRows is 0 - Instructs autodetect that there are no headers and - // data should be read starting from the first row. - // * skipLeadingRows = N > 0 - Autodetect skips N-1 rows and tries to detect - // headers in row N. If headers are not detected, row N is just skipped. - // Otherwise row N is used to extract column names for the detected schema. - SkipLeadingRows *int64 `json:"skipLeadingRows,omitempty"` - - // Optional. Range of a sheet to query from. Only used when non-empty. - // Typical format: sheet_name!top_left_cell_id:bottom_right_cell_id - // For example: sheet1!A1:B20 - Range *string `json:"range,omitempty"` +// +kcc:proto=google.cloud.bigquery.v2.LinkedDatasetSource +type LinkedDatasetSource struct { + // The source dataset reference contains project numbers and not project ids. + SourceDataset *DatasetReference `json:"sourceDataset,omitempty"` } -// +kcc:proto=google.cloud.bigquery.v2.HighCardinalityJoin -type HighCardinalityJoin struct { - // Output only. Count of left input rows. - LeftRows *int64 `json:"leftRows,omitempty"` - - // Output only. Count of right input rows. - RightRows *int64 `json:"rightRows,omitempty"` - - // Output only. Count of the output rows. - OutputRows *int64 `json:"outputRows,omitempty"` - - // Output only. The index of the join operator in the ExplainQueryStep lists. - StepIndex *int32 `json:"stepIndex,omitempty"` +// +kcc:proto=google.cloud.bigquery.v2.RestrictionConfig +type RestrictionConfig struct { + // Output only. Specifies the type of dataset/table restriction. + Type *string `json:"type,omitempty"` } -// +kcc:proto=google.cloud.bigquery.v2.HivePartitioningOptions -type HivePartitioningOptions struct { - // Optional. When set, what mode of hive partitioning to use when reading - // data. The following modes are supported: - // - // * AUTO: automatically infer partition key name(s) and type(s). - // - // * STRINGS: automatically infer partition key name(s). All types are - // strings. - // - // * CUSTOM: partition key schema is encoded in the source URI prefix. - // - // Not all storage formats support hive partitioning. Requesting hive - // partitioning on an unsupported format will lead to an error. - // Currently supported formats are: JSON, CSV, ORC, Avro and Parquet. - Mode *string `json:"mode,omitempty"` - - // Optional. When hive partition detection is requested, a common prefix for - // all source uris must be required. The prefix must end immediately before - // the partition key encoding begins. 
For example, consider files following - // this data layout: - // - // gs://bucket/path_to_table/dt=2019-06-01/country=USA/id=7/file.avro - // - // gs://bucket/path_to_table/dt=2019-05-31/country=CA/id=3/file.avro - // - // When hive partitioning is requested with either AUTO or STRINGS detection, - // the common prefix can be either of gs://bucket/path_to_table or - // gs://bucket/path_to_table/. - // - // CUSTOM detection requires encoding the partitioning schema immediately - // after the common prefix. For CUSTOM, any of - // - // * gs://bucket/path_to_table/{dt:DATE}/{country:STRING}/{id:INTEGER} - // - // * gs://bucket/path_to_table/{dt:STRING}/{country:STRING}/{id:INTEGER} - // - // * gs://bucket/path_to_table/{dt:DATE}/{country:STRING}/{id:STRING} - // - // would all be valid source URI prefixes. - SourceUriPrefix *string `json:"sourceUriPrefix,omitempty"` +// +kcc:proto=google.cloud.bigquery.v2.RoutineReference +type RoutineReference struct { + // The ID of the project containing this routine. + // +required + ProjectId *string `json:"projectId,omitempty"` - // Optional. If set to true, queries over this table require a partition - // filter that can be used for partition elimination to be specified. - // - // Note that this field should only be true when creating a permanent - // external table or querying a temporary external table. - // - // Hive-partitioned loads with require_partition_filter explicitly set to - // true will fail. - RequirePartitionFilter *bool `json:"requirePartitionFilter,omitempty"` + // The ID of the dataset containing this routine. + // +required + DatasetId *string `json:"datasetId,omitempty"` - // Output only. For permanent external tables, this field is populated with - // the hive partition keys in the order they were inferred. The types of the - // partition keys can be deduced by checking the table schema (which will - // include the partition keys). Not every API will populate this field in the - // output. For example, Tables.Get will populate it, but Tables.List will not - // contain this field. - Fields []string `json:"fields,omitempty"` + // The Id of the routine. The Id must contain only + // letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum + // length is 256 characters. + // +required + RoutineId *string `json:"routineId,omitempty"` } -// +kcc:proto=google.cloud.bigquery.v2.IndexUnusedReason -type IndexUnusedReason struct { - // Specifies the high-level reason for the scenario when no search index was - // used. - Code *string `json:"code,omitempty"` - - // Free form human-readable reason for the scenario when no search index was - // used. - Message *string `json:"message,omitempty"` +// +kcc:proto=google.cloud.bigquery.v2.TableReference +type TableReference struct { + // The ID of the project containing this table. + // +required + ProjectId *string `json:"projectId,omitempty"` - // Specifies the base table involved in the reason that no search index was - // used. - BaseTable *TableReference `json:"baseTable,omitempty"` + // The ID of the dataset containing this table. + // +required + DatasetId *string `json:"datasetId,omitempty"` - // Specifies the name of the unused search index, if available. - IndexName *string `json:"indexName,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.InputDataChange -type InputDataChange struct { - // Output only. Records read difference percentage compared to a previous run. 
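To make the hive partitioning layout above concrete, here is a minimal sketch of how AUTO/STRINGS detection reads key=value pairs from the object path that follows sourceUriPrefix. The helper name is hypothetical and not part of this API; it uses only the standard strings package.

import "strings"

// extractHivePartitionValues returns the partition key/value pairs encoded in
// the object path immediately after the common sourceUriPrefix, for example
// dt=2019-06-01/country=USA/id=7 in the layout shown above.
func extractHivePartitionValues(sourceURIPrefix, objectURI string) map[string]string {
	values := map[string]string{}
	rest := strings.TrimPrefix(objectURI, strings.TrimSuffix(sourceURIPrefix, "/")+"/")
	for _, segment := range strings.Split(rest, "/") {
		if key, value, ok := strings.Cut(segment, "="); ok {
			values[key] = value
		}
	}
	return values
}

// extractHivePartitionValues("gs://bucket/path_to_table/",
//	"gs://bucket/path_to_table/dt=2019-06-01/country=USA/id=7/file.avro")
// => map[country:USA dt:2019-06-01 id:7]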
- RecordsReadDiffPercentage *float64 `json:"recordsReadDiffPercentage,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.Job -type Job struct { - // Output only. The type of the resource. - Kind *string `json:"kind,omitempty"` - - // Output only. A hash of this resource. - Etag *string `json:"etag,omitempty"` - - // Output only. Opaque ID field of the job. - ID *string `json:"id,omitempty"` - - // Output only. A URL that can be used to access the resource again. - SelfLink *string `json:"selfLink,omitempty"` - - // Output only. Email address of the user who ran the job. - UserEmail *string `json:"userEmail,omitempty"` - - // Required. Describes the job configuration. - Configuration *JobConfiguration `json:"configuration,omitempty"` - - // Optional. Reference describing the unique-per-user name of the job. - JobReference *JobReference `json:"jobReference,omitempty"` - - // Output only. Information about the job, including starting time and ending - // time of the job. - Statistics *JobStatistics `json:"statistics,omitempty"` - - // Output only. The status of this job. Examine this value when polling an - // asynchronous job to see if the job is complete. - Status *JobStatus `json:"status,omitempty"` - - // Output only. [Full-projection-only] String representation of identity of - // requesting party. Populated for both first- and third-party identities. - // Only present for APIs that support third-party identities. - PrincipalSubject *string `json:"principalSubject,omitempty"` - - // Output only. The reason why a Job was created. - // [Preview](https://cloud.google.com/products/#product-launch-stages) - JobCreationReason *JobCreationReason `json:"jobCreationReason,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.JobConfiguration -type JobConfiguration struct { - // Output only. The type of the job. Can be QUERY, LOAD, EXTRACT, COPY or - // UNKNOWN. - JobType *string `json:"jobType,omitempty"` - - // [Pick one] Configures a query job. - Query *JobConfigurationQuery `json:"query,omitempty"` - - // [Pick one] Configures a load job. - Load *JobConfigurationLoad `json:"load,omitempty"` - - // [Pick one] Copies a table. - Copy *JobConfigurationTableCopy `json:"copy,omitempty"` - - // [Pick one] Configures an extract job. - Extract *JobConfigurationExtract `json:"extract,omitempty"` - - // Optional. If set, don't actually run this job. A valid query will return - // a mostly empty response with some processing statistics, while an invalid - // query will return the same error it would if it wasn't a dry run. Behavior - // of non-query jobs is undefined. - DryRun *bool `json:"dryRun,omitempty"` - - // Optional. Job timeout in milliseconds. If this time limit is exceeded, - // BigQuery will attempt to stop a longer job, but may not always succeed in - // canceling it before the job completes. For example, a job that takes more - // than 60 seconds to complete has a better chance of being stopped than a job - // that takes 10 seconds to complete. - JobTimeoutMs *int64 `json:"jobTimeoutMs,omitempty"` - - // The labels associated with this job. You can use these to organize and - // group your jobs. - // Label keys and values can be no longer than 63 characters, can only contain - // lowercase letters, numeric characters, underscores and dashes. - // International characters are allowed. Label values are optional. Label - // keys must start with a letter and each label in the list must have a - // different key. 
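The label constraints spelled out above lend themselves to a small validation sketch. This is a simplified, hypothetical check, not part of this API, and it ignores the documented allowance for international characters.

// validJobLabelKey checks a job label key against the documented rules:
// at most 63 characters, must start with a letter, and may contain only
// lowercase letters, digits, underscores and dashes.
func validJobLabelKey(key string) bool {
	if key == "" || len(key) > 63 {
		return false
	}
	if key[0] < 'a' || key[0] > 'z' {
		return false
	}
	for _, r := range key {
		switch {
		case r >= 'a' && r <= 'z', r >= '0' && r <= '9', r == '_', r == '-':
			// allowed
		default:
			return false
		}
	}
	return true
}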
- Labels map[string]string `json:"labels,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.JobConfigurationExtract -type JobConfigurationExtract struct { - // A reference to the table being exported. - SourceTable *TableReference `json:"sourceTable,omitempty"` - - // A reference to the model being exported. - SourceModel *ModelReference `json:"sourceModel,omitempty"` - - // [Pick one] A list of fully-qualified Google Cloud Storage URIs where the - // extracted table should be written. - DestinationUris []string `json:"destinationUris,omitempty"` - - // Optional. Whether to print out a header row in the results. - // Default is true. Not applicable when extracting models. - PrintHeader *bool `json:"printHeader,omitempty"` - - // Optional. When extracting data in CSV format, this defines the - // delimiter to use between fields in the exported data. - // Default is ','. Not applicable when extracting models. - FieldDelimiter *string `json:"fieldDelimiter,omitempty"` - - // Optional. The exported file format. Possible values include CSV, - // NEWLINE_DELIMITED_JSON, PARQUET, or AVRO for tables and ML_TF_SAVED_MODEL - // or ML_XGBOOST_BOOSTER for models. The default value for tables is CSV. - // Tables with nested or repeated fields cannot be exported as CSV. The - // default value for models is ML_TF_SAVED_MODEL. - DestinationFormat *string `json:"destinationFormat,omitempty"` - - // Optional. The compression type to use for exported files. Possible values - // include DEFLATE, GZIP, NONE, SNAPPY, and ZSTD. The default value is NONE. - // Not all compression formats are support for all file formats. DEFLATE is - // only supported for Avro. ZSTD is only supported for Parquet. Not applicable - // when extracting models. - Compression *string `json:"compression,omitempty"` - - // Whether to use logical types when extracting to AVRO format. Not applicable - // when extracting models. - UseAvroLogicalTypes *bool `json:"useAvroLogicalTypes,omitempty"` - - // Optional. Model extract options only applicable when extracting models. - ModelExtractOptions *JobConfigurationExtract_ModelExtractOptions `json:"modelExtractOptions,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.JobConfigurationExtract.ModelExtractOptions -type JobConfigurationExtract_ModelExtractOptions struct { - // The 1-based ID of the trial to be exported from a hyperparameter tuning - // model. If not specified, the trial with id = - // [Model](https://cloud.google.com/bigquery/docs/reference/rest/v2/models#resource:-model).defaultTrialId - // is exported. This field is ignored for models not trained with - // hyperparameter tuning. - TrialID *int64 `json:"trialID,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.JobConfigurationLoad -type JobConfigurationLoad struct { - // [Required] The fully-qualified URIs that point to your data in Google - // Cloud. - // For Google Cloud Storage URIs: - // Each URI can contain one '*' wildcard character and it must come after - // the 'bucket' name. Size limits related to load jobs apply to external - // data sources. - // For Google Cloud Bigtable URIs: - // Exactly one URI can be specified and it has be a fully specified and - // valid HTTPS URL for a Google Cloud Bigtable table. - // For Google Cloud Datastore backups: - // Exactly one URI can be specified. Also, the '*' wildcard character is not - // allowed. - SourceUris []string `json:"sourceUris,omitempty"` - - // Optional. Specifies how source URIs are interpreted for constructing the - // file set to load. 
By default, source URIs are expanded against the - // underlying storage. You can also specify manifest files to control how the - // file set is constructed. This option is only applicable to object storage - // systems. - FileSetSpecType *string `json:"fileSetSpecType,omitempty"` - - // Optional. The schema for the destination table. The schema can be - // omitted if the destination table already exists, or if you're loading data - // from Google Cloud Datastore. - Schema *TableSchema `json:"schema,omitempty"` - - // [Required] The destination table to load the data into. - DestinationTable *TableReference `json:"destinationTable,omitempty"` - - // Optional. [Experimental] Properties with which to create the destination - // table if it is new. - DestinationTableProperties *DestinationTableProperties `json:"destinationTableProperties,omitempty"` - - // Optional. Specifies whether the job is allowed to create new tables. - // The following values are supported: - // - // * CREATE_IF_NEEDED: If the table does not exist, BigQuery creates the - // table. - // * CREATE_NEVER: The table must already exist. If it does not, - // a 'notFound' error is returned in the job result. - // The default value is CREATE_IF_NEEDED. - // Creation, truncation and append actions occur as one atomic update - // upon job completion. - CreateDisposition *string `json:"createDisposition,omitempty"` - - // Optional. Specifies the action that occurs if the destination table - // already exists. The following values are supported: - // - // * WRITE_TRUNCATE: If the table already exists, BigQuery overwrites the - // data, removes the constraints and uses the schema from the load job. - // * WRITE_APPEND: If the table already exists, BigQuery appends the data to - // the table. - // * WRITE_EMPTY: If the table already exists and contains data, a 'duplicate' - // error is returned in the job result. - // - // The default value is WRITE_APPEND. - // Each action is atomic and only occurs if BigQuery is able to complete the - // job successfully. - // Creation, truncation and append actions occur as one atomic update - // upon job completion. - WriteDisposition *string `json:"writeDisposition,omitempty"` - - // Optional. Specifies a string that represents a null value in a CSV file. - // For example, if you specify "\N", BigQuery interprets "\N" as a null value - // when loading a CSV file. - // The default value is the empty string. If you set this property to a custom - // value, BigQuery throws an error if an empty string is present for all data - // types except for STRING and BYTE. For STRING and BYTE columns, BigQuery - // interprets the empty string as an empty value. - NullMarker *string `json:"nullMarker,omitempty"` - - // Optional. The separator character for fields in a CSV file. The separator - // is interpreted as a single byte. For files encoded in ISO-8859-1, any - // single character can be used as a separator. For files encoded in UTF-8, - // characters represented in decimal range 1-127 (U+0001-U+007F) can be used - // without any modification. UTF-8 characters encoded with multiple bytes - // (i.e. U+0080 and above) will have only the first byte used for separating - // fields. The remaining bytes will be treated as a part of the field. - // BigQuery also supports the escape sequence "\t" (U+0009) to specify a tab - // separator. The default value is comma (",", U+002C). - FieldDelimiter *string `json:"fieldDelimiter,omitempty"` - - // Optional. 
The number of rows at the top of a CSV file that BigQuery will - // skip when loading the data. The default value is 0. This property is useful - // if you have header rows in the file that should be skipped. When autodetect - // is on, the behavior is the following: - // - // * skipLeadingRows unspecified - Autodetect tries to detect headers in the - // first row. If they are not detected, the row is read as data. Otherwise - // data is read starting from the second row. - // * skipLeadingRows is 0 - Instructs autodetect that there are no headers and - // data should be read starting from the first row. - // * skipLeadingRows = N > 0 - Autodetect skips N-1 rows and tries to detect - // headers in row N. If headers are not detected, row N is just skipped. - // Otherwise row N is used to extract column names for the detected schema. - SkipLeadingRows *int32 `json:"skipLeadingRows,omitempty"` - - // Optional. The character encoding of the data. - // The supported values are UTF-8, ISO-8859-1, UTF-16BE, UTF-16LE, UTF-32BE, - // and UTF-32LE. The default value is UTF-8. BigQuery decodes the data after - // the raw, binary data has been split using the values of the `quote` and - // `fieldDelimiter` properties. - // - // If you don't specify an encoding, or if you specify a UTF-8 encoding when - // the CSV file is not UTF-8 encoded, BigQuery attempts to convert the data to - // UTF-8. Generally, your data loads successfully, but it may not match - // byte-for-byte what you expect. To avoid this, specify the correct encoding - // by using the `--encoding` flag. - // - // If BigQuery can't convert a character other than the ASCII `0` character, - // BigQuery converts the character to the standard Unicode replacement - // character: �. - Encoding *string `json:"encoding,omitempty"` - - // Optional. The value that is used to quote data sections in a CSV file. - // BigQuery converts the string to ISO-8859-1 encoding, and then uses the - // first byte of the encoded string to split the data in its raw, binary - // state. - // The default value is a float64-quote ('"'). - // If your data does not contain quoted sections, set the property value to an - // empty string. - // If your data contains quoted newline characters, you must also set the - // allowQuotedNewlines property to true. - // To include the specific quote character within a quoted value, precede it - // with an additional matching quote character. For example, if you want to - // escape the default character ' " ', use ' "" '. - // @default " - Quote *string `json:"quote,omitempty"` - - // Optional. The maximum number of bad records that BigQuery can ignore when - // running the job. If the number of bad records exceeds this value, an - // invalid error is returned in the job result. - // The default value is 0, which requires that all records are valid. - // This is only supported for CSV and NEWLINE_DELIMITED_JSON file formats. - MaxBadRecords *int32 `json:"maxBadRecords,omitempty"` - - // Indicates if BigQuery should allow quoted data sections that contain - // newline characters in a CSV file. The default value is false. - AllowQuotedNewlines *bool `json:"allowQuotedNewlines,omitempty"` - - // Optional. The format of the data files. - // For CSV files, specify "CSV". For datastore backups, - // specify "DATASTORE_BACKUP". For newline-delimited JSON, - // specify "NEWLINE_DELIMITED_JSON". For Avro, specify "AVRO". - // For parquet, specify "PARQUET". For orc, specify "ORC". - // The default value is CSV. 
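The interaction between skipLeadingRows and autodetect described above can be summarized in a short sketch; the helper is hypothetical and not part of this API.

// csvSkipAndHeaderRow returns how many leading rows are skipped outright and
// the 1-based row in which autodetect probes for headers, following the
// documented behavior; headerRow == 0 means no header detection is attempted.
func csvSkipAndHeaderRow(skipLeadingRows *int32) (skipped, headerRow int32) {
	switch {
	case skipLeadingRows == nil:
		return 0, 1 // unspecified: try to detect headers in the first row
	case *skipLeadingRows == 0:
		return 0, 0 // explicit 0: no headers, data starts at the first row
	default:
		return *skipLeadingRows - 1, *skipLeadingRows // skip N-1 rows, probe row N
	}
}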
- SourceFormat *string `json:"sourceFormat,omitempty"` - - // Optional. Accept rows that are missing trailing optional columns. - // The missing values are treated as nulls. - // If false, records with missing trailing columns are treated as bad records, - // and if there are too many bad records, an invalid error is returned in the - // job result. - // The default value is false. - // Only applicable to CSV, ignored for other formats. - AllowJaggedRows *bool `json:"allowJaggedRows,omitempty"` - - // Optional. Indicates if BigQuery should allow extra values that are not - // represented in the table schema. - // If true, the extra values are ignored. - // If false, records with extra columns are treated as bad records, and if - // there are too many bad records, an invalid error is returned in the job - // result. The default value is false. - // The sourceFormat property determines what BigQuery treats as an extra - // value: - // CSV: Trailing columns - // JSON: Named values that don't match any column names in the table schema - // Avro, Parquet, ORC: Fields in the file schema that don't exist in the - // table schema. - IgnoreUnknownValues *bool `json:"ignoreUnknownValues,omitempty"` - - // If sourceFormat is set to "DATASTORE_BACKUP", indicates which entity - // properties to load into BigQuery from a Cloud Datastore backup. Property - // names are case sensitive and must be top-level properties. If no properties - // are specified, BigQuery loads all properties. If any named property isn't - // found in the Cloud Datastore backup, an invalid error is returned in the - // job result. - ProjectionFields []string `json:"projectionFields,omitempty"` - - // Optional. Indicates if we should automatically infer the options and - // schema for CSV and JSON sources. - Autodetect *bool `json:"autodetect,omitempty"` - - // Allows the schema of the destination table to be updated as a side effect - // of the load job if a schema is autodetected or supplied in the job - // configuration. - // Schema update options are supported in two cases: - // when writeDisposition is WRITE_APPEND; - // when writeDisposition is WRITE_TRUNCATE and the destination table is a - // partition of a table, specified by partition decorators. For normal tables, - // WRITE_TRUNCATE will always overwrite the schema. - // One or more of the following values are specified: - // - // * ALLOW_FIELD_ADDITION: allow adding a nullable field to the schema. - // * ALLOW_FIELD_RELAXATION: allow relaxing a required field in the original - // schema to nullable. - SchemaUpdateOptions []string `json:"schemaUpdateOptions,omitempty"` - - // Time-based partitioning specification for the destination table. Only one - // of timePartitioning and rangePartitioning should be specified. - TimePartitioning *TimePartitioning `json:"timePartitioning,omitempty"` - - // Range partitioning specification for the destination table. - // Only one of timePartitioning and rangePartitioning should be specified. - RangePartitioning *RangePartitioning `json:"rangePartitioning,omitempty"` - - // Clustering specification for the destination table. - Clustering *Clustering `json:"clustering,omitempty"` - - // Custom encryption configuration (e.g., Cloud KMS keys) - DestinationEncryptionConfiguration *EncryptionConfiguration `json:"destinationEncryptionConfiguration,omitempty"` - - // Optional. 
If sourceFormat is set to "AVRO", indicates whether to interpret - // logical types as the corresponding BigQuery data type (for example, - // TIMESTAMP), instead of using the raw type (for example, INTEGER). - UseAvroLogicalTypes *bool `json:"useAvroLogicalTypes,omitempty"` - - // Optional. The user can provide a reference file with the reader schema. - // This file is only loaded if it is part of source URIs, but is not loaded - // otherwise. It is enabled for the following formats: AVRO, PARQUET, ORC. - ReferenceFileSchemaUri *string `json:"referenceFileSchemaUri,omitempty"` - - // Optional. When set, configures hive partitioning support. - // Not all storage formats support hive partitioning -- requesting hive - // partitioning on an unsupported format will lead to an error, as will - // providing an invalid specification. - HivePartitioningOptions *HivePartitioningOptions `json:"hivePartitioningOptions,omitempty"` - - // Defines the list of possible SQL data types to which the source decimal - // values are converted. This list and the precision and the scale parameters - // of the decimal field determine the target type. In the order of NUMERIC, - // BIGNUMERIC, and STRING, a - // type is picked if it is in the specified list and if it supports the - // precision and the scale. STRING supports all precision and scale values. - // If none of the listed types supports the precision and the scale, the type - // supporting the widest range in the specified list is picked, and if a value - // exceeds the supported range when reading the data, an error will be thrown. - // - // Example: Suppose the value of this field is ["NUMERIC", "BIGNUMERIC"]. - // If (precision,scale) is: - // - // * (38,9) -> NUMERIC; - // * (39,9) -> BIGNUMERIC (NUMERIC cannot hold 30 integer digits); - // * (38,10) -> BIGNUMERIC (NUMERIC cannot hold 10 fractional digits); - // * (76,38) -> BIGNUMERIC; - // * (77,38) -> BIGNUMERIC (error if value exeeds supported range). - // - // This field cannot contain duplicate types. The order of the types in this - // field is ignored. For example, ["BIGNUMERIC", "NUMERIC"] is the same as - // ["NUMERIC", "BIGNUMERIC"] and NUMERIC always takes precedence over - // BIGNUMERIC. - // - // Defaults to ["NUMERIC", "STRING"] for ORC and ["NUMERIC"] for the other - // file formats. - DecimalTargetTypes []string `json:"decimalTargetTypes,omitempty"` - - // Optional. Load option to be used together with source_format - // newline-delimited JSON to indicate that a variant of JSON is being loaded. - // To load newline-delimited GeoJSON, specify GEOJSON (and source_format must - // be set to NEWLINE_DELIMITED_JSON). - JsonExtension *string `json:"jsonExtension,omitempty"` - - // Optional. Additional properties to set if sourceFormat is set to PARQUET. - ParquetOptions *ParquetOptions `json:"parquetOptions,omitempty"` - - // Optional. When sourceFormat is set to "CSV", this indicates whether the - // embedded ASCII control characters (the first 32 characters in the - // ASCII-table, from - // '\x00' to '\x1F') are preserved. - PreserveAsciiControlCharacters *bool `json:"preserveAsciiControlCharacters,omitempty"` - - // Optional. Connection properties which can modify the load job behavior. - // Currently, only the 'session_id' connection property is supported, and is - // used to resolve _SESSION appearing as the dataset id. - ConnectionProperties []ConnectionProperty `json:"connectionProperties,omitempty"` - - // Optional. 
If this property is true, the job creates a new session using a - // randomly generated session_id. To continue using a created session with - // subsequent queries, pass the existing session identifier as a - // `ConnectionProperty` value. The session identifier is returned as part of - // the `SessionInfo` message within the query statistics. - // - // The new session's location will be set to `Job.JobReference.location` if it - // is present, otherwise it's set to the default location based on existing - // routing logic. - CreateSession *bool `json:"createSession,omitempty"` - - // Optional. Character map supported for column names in CSV/Parquet loads. - // Defaults to STRICT and can be overridden by Project Config Service. Using - // this option with unsupporting load formats will result in an error. - ColumnNameCharacterMap *string `json:"columnNameCharacterMap,omitempty"` - - // Optional. [Experimental] Configures the load job to copy files directly to - // the destination BigLake managed table, bypassing file content reading and - // rewriting. - // - // Copying files only is supported when all the following are true: - // - // * `source_uris` are located in the same Cloud Storage location as the - // destination table's `storage_uri` location. - // * `source_format` is `PARQUET`. - // * `destination_table` is an existing BigLake managed table. The table's - // schema does not have flexible column names. The table's columns do not - // have type parameters other than precision and scale. - // * No options other than the above are specified. - CopyFilesOnly *bool `json:"copyFilesOnly,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.JobConfigurationQuery -type JobConfigurationQuery struct { - // [Required] SQL query text to execute. The useLegacySql field can be used - // to indicate whether the query uses legacy SQL or GoogleSQL. - Query *string `json:"query,omitempty"` - - // Optional. Describes the table where the query results should be stored. - // This property must be set for large results that exceed the maximum - // response size. For queries that produce anonymous (cached) results, this - // field will be populated by BigQuery. - DestinationTable *TableReference `json:"destinationTable,omitempty"` - - // TODO: map type string message for external_table_definitions - - // Describes user-defined function resources used in the query. - UserDefinedFunctionResources []UserDefinedFunctionResource `json:"userDefinedFunctionResources,omitempty"` - - // Optional. Specifies whether the job is allowed to create new tables. - // The following values are supported: - // - // * CREATE_IF_NEEDED: If the table does not exist, BigQuery creates the - // table. - // * CREATE_NEVER: The table must already exist. If it does not, - // a 'notFound' error is returned in the job result. - // - // The default value is CREATE_IF_NEEDED. - // Creation, truncation and append actions occur as one atomic update - // upon job completion. - CreateDisposition *string `json:"createDisposition,omitempty"` - - // Optional. Specifies the action that occurs if the destination table - // already exists. The following values are supported: - // - // * WRITE_TRUNCATE: If the table already exists, BigQuery overwrites the - // data, removes the constraints, and uses the schema from the query result. - // * WRITE_APPEND: If the table already exists, BigQuery appends the data to - // the table. 
- // * WRITE_EMPTY: If the table already exists and contains data, a 'duplicate' - // error is returned in the job result. - // - // The default value is WRITE_EMPTY. Each action is atomic and only occurs if - // BigQuery is able to complete the job successfully. Creation, truncation and - // append actions occur as one atomic update upon job completion. - WriteDisposition *string `json:"writeDisposition,omitempty"` - - // Optional. Specifies the default dataset to use for unqualified - // table names in the query. This setting does not alter behavior of - // unqualified dataset names. Setting the system variable - // `@@dataset_id` achieves the same behavior. See - // https://cloud.google.com/bigquery/docs/reference/system-variables for more - // information on system variables. - DefaultDataset *DatasetReference `json:"defaultDataset,omitempty"` - - // Optional. Specifies a priority for the query. Possible values include - // INTERACTIVE and BATCH. The default value is INTERACTIVE. - Priority *string `json:"priority,omitempty"` - - // Optional. If true and query uses legacy SQL dialect, allows the query - // to produce arbitrarily large result tables at a slight cost in performance. - // Requires destinationTable to be set. - // For GoogleSQL queries, this flag is ignored and large results are - // always allowed. However, you must still set destinationTable when result - // size exceeds the allowed maximum response size. - AllowLargeResults *bool `json:"allowLargeResults,omitempty"` - - // Optional. Whether to look for the result in the query cache. The query - // cache is a best-effort cache that will be flushed whenever tables in the - // query are modified. Moreover, the query cache is only available when a - // query does not have a destination table specified. The default value is - // true. - UseQueryCache *bool `json:"useQueryCache,omitempty"` - - // Optional. If true and query uses legacy SQL dialect, flattens all nested - // and repeated fields in the query results. - // allowLargeResults must be true if this is set to false. - // For GoogleSQL queries, this flag is ignored and results are never - // flattened. - FlattenResults *bool `json:"flattenResults,omitempty"` - - // Limits the bytes billed for this job. Queries that will have - // bytes billed beyond this limit will fail (without incurring a charge). - // If unspecified, this will be set to your project default. - MaximumBytesBilled *int64 `json:"maximumBytesBilled,omitempty"` - - // Optional. Specifies whether to use BigQuery's legacy SQL dialect for this - // query. The default value is true. If set to false, the query will use - // BigQuery's GoogleSQL: - // https://cloud.google.com/bigquery/sql-reference/ - // - // When useLegacySql is set to false, the value of flattenResults is ignored; - // query will be run as if flattenResults is false. - UseLegacySql *bool `json:"useLegacySql,omitempty"` - - // GoogleSQL only. Set to POSITIONAL to use positional (?) query parameters - // or to NAMED to use named (@myparam) query parameters in this query. - ParameterMode *string `json:"parameterMode,omitempty"` - - // Query parameters for GoogleSQL queries. - QueryParameters []QueryParameter `json:"queryParameters,omitempty"` - - // Output only. System variables for GoogleSQL queries. A system variable is - // output if the variable is settable and its value differs from the system - // default. - // "@@" prefix is not included in the name of the System variables. 
- SystemVariables *SystemVariables `json:"systemVariables,omitempty"` - - // Allows the schema of the destination table to be updated as a side effect - // of the query job. Schema update options are supported in two cases: - // when writeDisposition is WRITE_APPEND; - // when writeDisposition is WRITE_TRUNCATE and the destination table is a - // partition of a table, specified by partition decorators. For normal tables, - // WRITE_TRUNCATE will always overwrite the schema. - // One or more of the following values are specified: - // - // * ALLOW_FIELD_ADDITION: allow adding a nullable field to the schema. - // * ALLOW_FIELD_RELAXATION: allow relaxing a required field in the original - // schema to nullable. - SchemaUpdateOptions []string `json:"schemaUpdateOptions,omitempty"` - - // Time-based partitioning specification for the destination table. Only one - // of timePartitioning and rangePartitioning should be specified. - TimePartitioning *TimePartitioning `json:"timePartitioning,omitempty"` - - // Range partitioning specification for the destination table. - // Only one of timePartitioning and rangePartitioning should be specified. - RangePartitioning *RangePartitioning `json:"rangePartitioning,omitempty"` - - // Clustering specification for the destination table. - Clustering *Clustering `json:"clustering,omitempty"` - - // Custom encryption configuration (e.g., Cloud KMS keys) - DestinationEncryptionConfiguration *EncryptionConfiguration `json:"destinationEncryptionConfiguration,omitempty"` - - // Options controlling the execution of scripts. - ScriptOptions *ScriptOptions `json:"scriptOptions,omitempty"` - - // Connection properties which can modify the query behavior. - ConnectionProperties []ConnectionProperty `json:"connectionProperties,omitempty"` - - // If this property is true, the job creates a new session using a randomly - // generated session_id. To continue using a created session with - // subsequent queries, pass the existing session identifier as a - // `ConnectionProperty` value. The session identifier is returned as part of - // the `SessionInfo` message within the query statistics. - // - // The new session's location will be set to `Job.JobReference.location` if it - // is present, otherwise it's set to the default location based on existing - // routing logic. - CreateSession *bool `json:"createSession,omitempty"` - - // Optional. Whether to run the query as continuous or a regular query. - // Continuous query is currently in experimental stage and not ready for - // general usage. - Continuous *bool `json:"continuous,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.JobConfigurationTableCopy -type JobConfigurationTableCopy struct { - // [Pick one] Source table to copy. - SourceTable *TableReference `json:"sourceTable,omitempty"` - - // [Pick one] Source tables to copy. - SourceTables []TableReference `json:"sourceTables,omitempty"` - - // [Required] The destination table. - DestinationTable *TableReference `json:"destinationTable,omitempty"` - - // Optional. Specifies whether the job is allowed to create new tables. - // The following values are supported: - // - // * CREATE_IF_NEEDED: If the table does not exist, BigQuery creates the - // table. - // * CREATE_NEVER: The table must already exist. If it does not, - // a 'notFound' error is returned in the job result. - // - // The default value is CREATE_IF_NEEDED. - // Creation, truncation and append actions occur as one atomic update - // upon job completion. 
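Since createDisposition and writeDisposition appear in several of the job configurations above, a compact sketch of their documented semantics may help. The helper and its result strings are illustrative only; atomicity and the per-job default values are as described in the field comments.

// dispositionOutcome describes what the documented dispositions do for a
// given destination table state. The returned strings are informal.
func dispositionOutcome(createDisposition, writeDisposition string, tableExists, tableHasData bool) string {
	if !tableExists {
		if createDisposition == "CREATE_NEVER" {
			return "error: notFound"
		}
		return "create the table, then write" // CREATE_IF_NEEDED
	}
	switch writeDisposition {
	case "WRITE_TRUNCATE":
		return "overwrite the existing data"
	case "WRITE_APPEND":
		return "append to the existing data"
	default: // WRITE_EMPTY
		if tableHasData {
			return "error: duplicate"
		}
		return "write into the empty table"
	}
}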
- CreateDisposition *string `json:"createDisposition,omitempty"` - - // Optional. Specifies the action that occurs if the destination table - // already exists. The following values are supported: - // - // * WRITE_TRUNCATE: If the table already exists, BigQuery overwrites the - // table data and uses the schema and table constraints from the source table. - // * WRITE_APPEND: If the table already exists, BigQuery appends the data to - // the table. - // * WRITE_EMPTY: If the table already exists and contains data, a 'duplicate' - // error is returned in the job result. - // - // The default value is WRITE_EMPTY. Each action is atomic and only occurs if - // BigQuery is able to complete the job successfully. Creation, truncation and - // append actions occur as one atomic update upon job completion. - WriteDisposition *string `json:"writeDisposition,omitempty"` - - // Custom encryption configuration (e.g., Cloud KMS keys). - DestinationEncryptionConfiguration *EncryptionConfiguration `json:"destinationEncryptionConfiguration,omitempty"` - - // Optional. Supported operation types in table copy job. - OperationType *string `json:"operationType,omitempty"` - - // Optional. The time when the destination table expires. Expired tables will - // be deleted and their storage reclaimed. - DestinationExpirationTime *string `json:"destinationExpirationTime,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.JobCreationReason -type JobCreationReason struct { - // Output only. Specifies the high level reason why a Job was created. - Code *string `json:"code,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.JobList -type JobList struct { - // A hash of this page of results. - Etag *string `json:"etag,omitempty"` - - // The resource type of the response. - Kind *string `json:"kind,omitempty"` - - // A token to request the next page of results. - NextPageToken *string `json:"nextPageToken,omitempty"` - - // List of jobs that were requested. - Jobs []ListFormatJob `json:"jobs,omitempty"` - - // A list of skipped locations that were unreachable. For more information - // about BigQuery locations, see: - // https://cloud.google.com/bigquery/docs/locations. Example: "europe-west5" - Unreachable []string `json:"unreachable,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.JobReference -type JobReference struct { - // Required. The ID of the project containing this job. - ProjectID *string `json:"projectID,omitempty"` - - // Required. The ID of the job. The ID must contain only letters (a-z, A-Z), - // numbers (0-9), underscores (_), or dashes (-). The maximum length is 1,024 - // characters. - JobID *string `json:"jobID,omitempty"` - - // Optional. The geographic location of the job. The default value is US. - // - // For more information about BigQuery locations, see: - // https://cloud.google.com/bigquery/docs/locations - Location *string `json:"location,omitempty"` - - // This field should not be used. - LocationAlternative []string `json:"locationAlternative,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.JobStatistics -type JobStatistics struct { - // Output only. Creation time of this job, in milliseconds since the epoch. - // This field will be present on all jobs. - CreationTime *int64 `json:"creationTime,omitempty"` - - // Output only. Start time of this job, in milliseconds since the epoch. - // This field will be present when the job transitions from the PENDING state - // to either RUNNING or DONE. - StartTime *int64 `json:"startTime,omitempty"` - - // Output only. 
End time of this job, in milliseconds since the epoch. This - // field will be present whenever a job is in the DONE state. - EndTime *int64 `json:"endTime,omitempty"` - - // Output only. Total bytes processed for the job. - TotalBytesProcessed *int64 `json:"totalBytesProcessed,omitempty"` - - // Output only. [TrustedTester] Job progress (0.0 -> 1.0) for LOAD and - // EXTRACT jobs. - CompletionRatio *float64 `json:"completionRatio,omitempty"` - - // Output only. Quotas which delayed this job's start time. - QuotaDeferments []string `json:"quotaDeferments,omitempty"` - - // Output only. Statistics for a query job. - Query *JobStatistics2 `json:"query,omitempty"` - - // Output only. Statistics for a load job. - Load *JobStatistics3 `json:"load,omitempty"` - - // Output only. Statistics for an extract job. - Extract *JobStatistics4 `json:"extract,omitempty"` - - // Output only. Statistics for a copy job. - Copy *CopyJobStatistics `json:"copy,omitempty"` - - // Output only. Slot-milliseconds for the job. - TotalSlotMs *int64 `json:"totalSlotMs,omitempty"` - - // Output only. Name of the primary reservation assigned to this job. Note - // that this could be different than reservations reported in the reservation - // usage field if parent reservations were used to execute this job. - ReservationID *string `json:"reservationID,omitempty"` - - // Output only. Number of child jobs executed. - NumChildJobs *int64 `json:"numChildJobs,omitempty"` - - // Output only. If this is a child job, specifies the job ID of the parent. - ParentJobID *string `json:"parentJobID,omitempty"` - - // Output only. If this a child job of a script, specifies information about - // the context of this job within the script. - ScriptStatistics *ScriptStatistics `json:"scriptStatistics,omitempty"` - - // Output only. Statistics for row-level security. Present only for query and - // extract jobs. - RowLevelSecurityStatistics *RowLevelSecurityStatistics `json:"rowLevelSecurityStatistics,omitempty"` - - // Output only. Statistics for data-masking. Present only for query and - // extract jobs. - DataMaskingStatistics *DataMaskingStatistics `json:"dataMaskingStatistics,omitempty"` - - // Output only. [Alpha] Information of the multi-statement transaction if this - // job is part of one. - // - // This property is only expected on a child job or a job that is in a - // session. A script parent job is not part of the transaction started in the - // script. - TransactionInfo *JobStatistics_TransactionInfo `json:"transactionInfo,omitempty"` - - // Output only. Information of the session if this job is part of one. - SessionInfo *SessionInfo `json:"sessionInfo,omitempty"` - - // Output only. The duration in milliseconds of the execution of the final - // attempt of this job, as BigQuery may internally re-attempt to execute the - // job. - FinalExecutionDurationMs *int64 `json:"finalExecutionDurationMs,omitempty"` - - // Output only. Name of edition corresponding to the reservation for this job - // at the time of this update. - Edition *string `json:"edition,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.JobStatistics.TransactionInfo -type JobStatistics_TransactionInfo struct { - // Output only. [Alpha] Id of the transaction. - TransactionID *string `json:"transactionID,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.JobStatistics2 -type JobStatistics2 struct { - // Output only. Describes execution plan for the query. - QueryPlan []ExplainQueryStage `json:"queryPlan,omitempty"` - - // Output only. 
The original estimate of bytes processed for the job. - EstimatedBytesProcessed *int64 `json:"estimatedBytesProcessed,omitempty"` - - // Output only. Describes a timeline of job execution. - Timeline []QueryTimelineSample `json:"timeline,omitempty"` - - // Output only. Total number of partitions processed from all partitioned - // tables referenced in the job. - TotalPartitionsProcessed *int64 `json:"totalPartitionsProcessed,omitempty"` - - // Output only. Total bytes processed for the job. - TotalBytesProcessed *int64 `json:"totalBytesProcessed,omitempty"` - - // Output only. For dry-run jobs, totalBytesProcessed is an estimate and this - // field specifies the accuracy of the estimate. Possible values can be: - // UNKNOWN: accuracy of the estimate is unknown. - // PRECISE: estimate is precise. - // LOWER_BOUND: estimate is lower bound of what the query would cost. - // UPPER_BOUND: estimate is upper bound of what the query would cost. - TotalBytesProcessedAccuracy *string `json:"totalBytesProcessedAccuracy,omitempty"` - - // Output only. If the project is configured to use on-demand pricing, - // then this field contains the total bytes billed for the job. - // If the project is configured to use flat-rate pricing, then you are - // not billed for bytes and this field is informational only. - TotalBytesBilled *int64 `json:"totalBytesBilled,omitempty"` - - // Output only. Billing tier for the job. This is a BigQuery-specific concept - // which is not related to the Google Cloud notion of "free tier". The value - // here is a measure of the query's resource consumption relative to the - // amount of data scanned. For on-demand queries, the limit is 100, and all - // queries within this limit are billed at the standard on-demand rates. - // On-demand queries that exceed this limit will fail with a - // billingTierLimitExceeded error. - BillingTier *int32 `json:"billingTier,omitempty"` - - // Output only. Slot-milliseconds for the job. - TotalSlotMs *int64 `json:"totalSlotMs,omitempty"` - - // Output only. Whether the query result was fetched from the query cache. - CacheHit *bool `json:"cacheHit,omitempty"` - - // Output only. Referenced tables for the job. Queries that reference more - // than 50 tables will not have a complete list. - ReferencedTables []TableReference `json:"referencedTables,omitempty"` - - // Output only. Referenced routines for the job. - ReferencedRoutines []RoutineReference `json:"referencedRoutines,omitempty"` - - // Output only. The schema of the results. Present only for successful dry - // run of non-legacy SQL queries. - Schema *TableSchema `json:"schema,omitempty"` - - // Output only. The number of rows affected by a DML statement. Present - // only for DML statements INSERT, UPDATE or DELETE. - NumDmlAffectedRows *int64 `json:"numDmlAffectedRows,omitempty"` - - // Output only. Detailed statistics for DML statements INSERT, UPDATE, DELETE, - // MERGE or TRUNCATE. - DmlStats *DmlStats `json:"dmlStats,omitempty"` - - // Output only. GoogleSQL only: list of undeclared query - // parameters detected during a dry run validation. - UndeclaredQueryParameters []QueryParameter `json:"undeclaredQueryParameters,omitempty"` - - // Output only. The type of query statement, if valid. - // Possible values: - // - // * `SELECT`: - // [`SELECT`](https://cloud.google.com/bigquery/docs/reference/standard-sql/query-syntax#select_list) - // statement. 
- // * `ASSERT`: - // [`ASSERT`](https://cloud.google.com/bigquery/docs/reference/standard-sql/debugging-statements#assert) - // statement. - // * `INSERT`: - // [`INSERT`](https://cloud.google.com/bigquery/docs/reference/standard-sql/dml-syntax#insert_statement) - // statement. - // * `UPDATE`: - // [`UPDATE`](https://cloud.google.com/bigquery/docs/reference/standard-sql/query-syntax#update_statement) - // statement. - // * `DELETE`: - // [`DELETE`](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-manipulation-language) - // statement. - // * `MERGE`: - // [`MERGE`](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-manipulation-language) - // statement. - // * `CREATE_TABLE`: [`CREATE - // TABLE`](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#create_table_statement) - // statement, without `AS SELECT`. - // * `CREATE_TABLE_AS_SELECT`: [`CREATE TABLE AS - // SELECT`](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#query_statement) - // statement. - // * `CREATE_VIEW`: [`CREATE - // VIEW`](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#create_view_statement) - // statement. - // * `CREATE_MODEL`: [`CREATE - // MODEL`](https://cloud.google.com/bigquery-ml/docs/reference/standard-sql/bigqueryml-syntax-create#create_model_statement) - // statement. - // * `CREATE_MATERIALIZED_VIEW`: [`CREATE MATERIALIZED - // VIEW`](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#create_materialized_view_statement) - // statement. - // * `CREATE_FUNCTION`: [`CREATE - // FUNCTION`](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#create_function_statement) - // statement. - // * `CREATE_TABLE_FUNCTION`: [`CREATE TABLE - // FUNCTION`](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#create_table_function_statement) - // statement. - // * `CREATE_PROCEDURE`: [`CREATE - // PROCEDURE`](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#create_procedure) - // statement. - // * `CREATE_ROW_ACCESS_POLICY`: [`CREATE ROW ACCESS - // POLICY`](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#create_row_access_policy_statement) - // statement. - // * `CREATE_SCHEMA`: [`CREATE - // SCHEMA`](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#create_schema_statement) - // statement. - // * `CREATE_SNAPSHOT_TABLE`: [`CREATE SNAPSHOT - // TABLE`](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#create_snapshot_table_statement) - // statement. - // * `CREATE_SEARCH_INDEX`: [`CREATE SEARCH - // INDEX`](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#create_search_index_statement) - // statement. - // * `DROP_TABLE`: [`DROP - // TABLE`](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#drop_table_statement) - // statement. - // * `DROP_EXTERNAL_TABLE`: [`DROP EXTERNAL - // TABLE`](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#drop_external_table_statement) - // statement. - // * `DROP_VIEW`: [`DROP - // VIEW`](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#drop_view_statement) - // statement. 
- // * `DROP_MODEL`: [`DROP - // MODEL`](https://cloud.google.com/bigquery-ml/docs/reference/standard-sql/bigqueryml-syntax-drop-model) - // statement. - // * `DROP_MATERIALIZED_VIEW`: [`DROP MATERIALIZED - // VIEW`](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#drop_materialized_view_statement) - // statement. - // * `DROP_FUNCTION` : [`DROP - // FUNCTION`](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#drop_function_statement) - // statement. - // * `DROP_TABLE_FUNCTION` : [`DROP TABLE - // FUNCTION`](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#drop_table_function) - // statement. - // * `DROP_PROCEDURE`: [`DROP - // PROCEDURE`](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#drop_procedure_statement) - // statement. - // * `DROP_SEARCH_INDEX`: [`DROP SEARCH - // INDEX`](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#drop_search_index) - // statement. - // * `DROP_SCHEMA`: [`DROP - // SCHEMA`](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#drop_schema_statement) - // statement. - // * `DROP_SNAPSHOT_TABLE`: [`DROP SNAPSHOT - // TABLE`](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#drop_snapshot_table_statement) - // statement. - // * `DROP_ROW_ACCESS_POLICY`: [`DROP [ALL] ROW ACCESS - // POLICY|POLICIES`](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#drop_row_access_policy_statement) - // statement. - // * `ALTER_TABLE`: [`ALTER - // TABLE`](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#alter_table_set_options_statement) - // statement. - // * `ALTER_VIEW`: [`ALTER - // VIEW`](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#alter_view_set_options_statement) - // statement. - // * `ALTER_MATERIALIZED_VIEW`: [`ALTER MATERIALIZED - // VIEW`](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#alter_materialized_view_set_options_statement) - // statement. - // * `ALTER_SCHEMA`: [`ALTER - // SCHEMA`](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#aalter_schema_set_options_statement) - // statement. - // * `SCRIPT`: - // [`SCRIPT`](https://cloud.google.com/bigquery/docs/reference/standard-sql/procedural-language). - // * `TRUNCATE_TABLE`: [`TRUNCATE - // TABLE`](https://cloud.google.com/bigquery/docs/reference/standard-sql/dml-syntax#truncate_table_statement) - // statement. - // * `CREATE_EXTERNAL_TABLE`: [`CREATE EXTERNAL - // TABLE`](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#create_external_table_statement) - // statement. - // * `EXPORT_DATA`: [`EXPORT - // DATA`](https://cloud.google.com/bigquery/docs/reference/standard-sql/other-statements#export_data_statement) - // statement. - // * `EXPORT_MODEL`: [`EXPORT - // MODEL`](https://cloud.google.com/bigquery-ml/docs/reference/standard-sql/bigqueryml-syntax-export-model) - // statement. - // * `LOAD_DATA`: [`LOAD - // DATA`](https://cloud.google.com/bigquery/docs/reference/standard-sql/other-statements#load_data_statement) - // statement. - // * `CALL`: - // [`CALL`](https://cloud.google.com/bigquery/docs/reference/standard-sql/procedural-language#call) - // statement. 
- StatementType *string `json:"statementType,omitempty"` - - // Output only. The DDL operation performed, possibly - // dependent on the pre-existence of the DDL target. - DdlOperationPerformed *string `json:"ddlOperationPerformed,omitempty"` - - // Output only. The DDL target table. Present only for - // CREATE/DROP TABLE/VIEW and DROP ALL ROW ACCESS POLICIES queries. - DdlTargetTable *TableReference `json:"ddlTargetTable,omitempty"` - - // Output only. The table after rename. Present only for ALTER TABLE RENAME TO - // query. - DdlDestinationTable *TableReference `json:"ddlDestinationTable,omitempty"` - - // Output only. The DDL target row access policy. Present only for - // CREATE/DROP ROW ACCESS POLICY queries. - DdlTargetRowAccessPolicy *RowAccessPolicyReference `json:"ddlTargetRowAccessPolicy,omitempty"` - - // Output only. The number of row access policies affected by a DDL statement. - // Present only for DROP ALL ROW ACCESS POLICIES queries. - DdlAffectedRowAccessPolicyCount *int64 `json:"ddlAffectedRowAccessPolicyCount,omitempty"` - - // Output only. [Beta] The DDL target routine. Present only for - // CREATE/DROP FUNCTION/PROCEDURE queries. - DdlTargetRoutine *RoutineReference `json:"ddlTargetRoutine,omitempty"` - - // Output only. The DDL target dataset. Present only for CREATE/ALTER/DROP - // SCHEMA(dataset) queries. - DdlTargetDataset *DatasetReference `json:"ddlTargetDataset,omitempty"` - - // Output only. Statistics of a BigQuery ML training job. - MlStatistics *MlStatistics `json:"mlStatistics,omitempty"` - - // Output only. Stats for EXPORT DATA statement. - ExportDataStatistics *ExportDataStatistics `json:"exportDataStatistics,omitempty"` - - // Output only. Job cost breakdown as bigquery internal cost and external - // service costs. - ExternalServiceCosts []ExternalServiceCost `json:"externalServiceCosts,omitempty"` - - // Output only. BI Engine specific Statistics. - BiEngineStatistics *BiEngineStatistics `json:"biEngineStatistics,omitempty"` - - // Output only. Statistics for a LOAD query. - LoadQueryStatistics *LoadQueryStatistics `json:"loadQueryStatistics,omitempty"` - - // Output only. Referenced table for DCL statement. - DclTargetTable *TableReference `json:"dclTargetTable,omitempty"` - - // Output only. Referenced view for DCL statement. - DclTargetView *TableReference `json:"dclTargetView,omitempty"` - - // Output only. Referenced dataset for DCL statement. - DclTargetDataset *DatasetReference `json:"dclTargetDataset,omitempty"` - - // Output only. Search query specific statistics. - SearchStatistics *SearchStatistics `json:"searchStatistics,omitempty"` - - // Output only. Vector Search query specific statistics. - VectorSearchStatistics *VectorSearchStatistics `json:"vectorSearchStatistics,omitempty"` - - // Output only. Performance insights. - PerformanceInsights *PerformanceInsights `json:"performanceInsights,omitempty"` - - // Output only. Query optimization information for a QUERY job. - QueryInfo *QueryInfo `json:"queryInfo,omitempty"` - - // Output only. Statistics of a Spark procedure job. - SparkStatistics *SparkStatistics `json:"sparkStatistics,omitempty"` - - // Output only. Total bytes transferred for cross-cloud queries such as Cross - // Cloud Transfer and CREATE TABLE AS SELECT (CTAS). - TransferredBytes *int64 `json:"transferredBytes,omitempty"` - - // Output only. Statistics of materialized views of a query job. - MaterializedViewStatistics *MaterializedViewStatistics `json:"materializedViewStatistics,omitempty"` - - // Output only. 
Statistics of metadata cache usage in a query for BigLake - // tables. - MetadataCacheStatistics *MetadataCacheStatistics `json:"metadataCacheStatistics,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.JobStatistics3 -type JobStatistics3 struct { - // Output only. Number of source files in a load job. - InputFiles *int64 `json:"inputFiles,omitempty"` - - // Output only. Number of bytes of source data in a load job. - InputFileBytes *int64 `json:"inputFileBytes,omitempty"` - - // Output only. Number of rows imported in a load job. - // Note that while an import job is in the running state, this - // value may change. - OutputRows *int64 `json:"outputRows,omitempty"` - - // Output only. Size of the loaded data in bytes. Note - // that while a load job is in the running state, this value may change. - OutputBytes *int64 `json:"outputBytes,omitempty"` - - // Output only. The number of bad records encountered. Note that if the job - // has failed because of more bad records encountered than the maximum - // allowed in the load job configuration, then this number can be less than - // the total number of bad records present in the input data. - BadRecords *int64 `json:"badRecords,omitempty"` - - // Output only. Describes a timeline of job execution. - Timeline []QueryTimelineSample `json:"timeline,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.JobStatistics4 -type JobStatistics4 struct { - // Output only. Number of files per destination URI or URI pattern - // specified in the extract configuration. These values will be in the same - // order as the URIs specified in the 'destinationUris' field. - DestinationUriFileCounts []int64 `json:"destinationUriFileCounts,omitempty"` - - // Output only. Number of user bytes extracted into the result. This is the - // byte count as computed by BigQuery for billing purposes - // and doesn't have any relationship with the number of actual - // result bytes extracted in the desired format. - InputBytes *int64 `json:"inputBytes,omitempty"` - - // Output only. Describes a timeline of job execution. - Timeline []QueryTimelineSample `json:"timeline,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.JobStatus -type JobStatus struct { - // Output only. Final error result of the job. If present, indicates that the - // job has completed and was unsuccessful. - ErrorResult *ErrorProto `json:"errorResult,omitempty"` - - // Output only. The first errors encountered during the running of the job. - // The final message includes the number of errors that caused the process to - // stop. Errors here do not necessarily mean that the job has not completed or - // was unsuccessful. - Errors []ErrorProto `json:"errors,omitempty"` - - // Output only. Running state of the job. Valid states include 'PENDING', - // 'RUNNING', and 'DONE'. - State *string `json:"state,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.JoinRestrictionPolicy -type JoinRestrictionPolicy struct { - // Optional. Specifies if a join is required or not on queries for the view. - // Default is JOIN_CONDITION_UNSPECIFIED. - JoinCondition *string `json:"joinCondition,omitempty"` - - // Optional. The only columns that joins are allowed on. - // This field must be specified for join_conditions JOIN_ANY and JOIN_ALL, - // and it cannot be set for JOIN_BLOCKED. - JoinAllowedColumns []string `json:"joinAllowedColumns,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.JsonOptions -type JsonOptions struct { - // Optional. The character encoding of the data.
- // The supported values are UTF-8, UTF-16BE, UTF-16LE, UTF-32BE, - // and UTF-32LE. The default value is UTF-8. - Encoding *string `json:"encoding,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.LinkedDatasetSource -type LinkedDatasetSource struct { - // The source dataset reference contains project numbers and not project ids. - SourceDataset *DatasetReference `json:"sourceDataset,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.ListFormatDataset -type ListFormatDataset struct { - // The resource type. - // This property always returns the value "bigquery#dataset" - Kind *string `json:"kind,omitempty"` - - // The fully-qualified, unique, opaque ID of the dataset. - ID *string `json:"id,omitempty"` - - // The dataset reference. - // Use this property to access specific parts of the dataset's ID, such as - // project ID or dataset ID. - DatasetReference *DatasetReference `json:"datasetReference,omitempty"` - - // The labels associated with this dataset. - // You can use these to organize and group your datasets. - Labels map[string]string `json:"labels,omitempty"` - - // An alternate name for the dataset. The friendly name is purely - // decorative in nature. - FriendlyName *string `json:"friendlyName,omitempty"` - - // The geographic location where the dataset resides. - Location *string `json:"location,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.ListFormatJob -type ListFormatJob struct { - // Unique opaque ID of the job. - ID *string `json:"id,omitempty"` - - // The resource type. - Kind *string `json:"kind,omitempty"` - - // Unique opaque ID of the job. - JobReference *JobReference `json:"jobReference,omitempty"` - - // Running state of the job. When the state is DONE, errorResult can be - // checked to determine whether the job succeeded or failed. - State *string `json:"state,omitempty"` - - // A result object that will be present only if the job has failed. - ErrorResult *ErrorProto `json:"errorResult,omitempty"` - - // Output only. Information about the job, including starting time and ending - // time of the job. - Statistics *JobStatistics `json:"statistics,omitempty"` - - // Required. Describes the job configuration. - Configuration *JobConfiguration `json:"configuration,omitempty"` - - // [Full-projection-only] Describes the status of this job. - Status *JobStatus `json:"status,omitempty"` - - // [Full-projection-only] Email address of the user who ran the job. - UserEmail *string `json:"userEmail,omitempty"` - - // [Full-projection-only] String representation of identity of requesting - // party. Populated for both first- and third-party identities. Only present - // for APIs that support third-party identities. - PrincipalSubject *string `json:"principalSubject,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.ListFormatTable -type ListFormatTable struct { - // The resource type. - Kind *string `json:"kind,omitempty"` - - // An opaque ID of the table. - ID *string `json:"id,omitempty"` - - // A reference uniquely identifying table. - TableReference *TableReference `json:"tableReference,omitempty"` - - // The user-friendly name for this table. - FriendlyName *string `json:"friendlyName,omitempty"` - - // The type of table. - Type *string `json:"type,omitempty"` - - // The time-based partitioning for this table. - TimePartitioning *TimePartitioning `json:"timePartitioning,omitempty"` - - // The range partitioning for this table. 
- RangePartitioning *RangePartitioning `json:"rangePartitioning,omitempty"` - - // Clustering specification for this table, if configured. - Clustering *Clustering `json:"clustering,omitempty"` - - // The labels associated with this table. You can use these to organize - // and group your tables. - Labels map[string]string `json:"labels,omitempty"` - - // Additional details for a view. - View *ListFormatView `json:"view,omitempty"` - - // Output only. The time when this table was created, in milliseconds since - // the epoch. - CreationTime *int64 `json:"creationTime,omitempty"` - - // The time when this table expires, in milliseconds since the - // epoch. If not present, the table will persist indefinitely. Expired tables - // will be deleted and their storage reclaimed. - ExpirationTime *int64 `json:"expirationTime,omitempty"` - - // Optional. If set to true, queries including this table must specify a - // partition filter. This filter is used for partition elimination. - RequirePartitionFilter *bool `json:"requirePartitionFilter,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.ListFormatView -type ListFormatView struct { - // True if view is defined in legacy SQL dialect, - // false if in GoogleSQL. - UseLegacySql *bool `json:"useLegacySql,omitempty"` - - // Specifies the privacy policy for the view. - PrivacyPolicy *PrivacyPolicy `json:"privacyPolicy,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.LoadQueryStatistics -type LoadQueryStatistics struct { - // Output only. Number of source files in a LOAD query. - InputFiles *int64 `json:"inputFiles,omitempty"` - - // Output only. Number of bytes of source data in a LOAD query. - InputFileBytes *int64 `json:"inputFileBytes,omitempty"` - - // Output only. Number of rows imported in a LOAD query. - // Note that while a LOAD query is in the running state, this value may - // change. - OutputRows *int64 `json:"outputRows,omitempty"` - - // Output only. Size of the loaded data in bytes. Note that while a LOAD query - // is in the running state, this value may change. - OutputBytes *int64 `json:"outputBytes,omitempty"` - - // Output only. The number of bad records encountered while processing a LOAD - // query. Note that if the job has failed because of more bad records - // encountered than the maximum allowed in the load job configuration, then - // this number can be less than the total number of bad records present in the - // input data. - BadRecords *int64 `json:"badRecords,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.MaterializedView -type MaterializedView struct { - // The candidate materialized view. - TableReference *TableReference `json:"tableReference,omitempty"` - - // Whether the materialized view is chosen for the query. - // - // A materialized view can be chosen to rewrite multiple parts of the same - // query. If a materialized view is chosen to rewrite any part of the query, - // then this field is true, even if the materialized view was not chosen to - // rewrite other parts. - Chosen *bool `json:"chosen,omitempty"` - - // If present, specifies a best-effort estimation of the bytes saved by using - // the materialized view rather than its base tables. - EstimatedBytesSaved *int64 `json:"estimatedBytesSaved,omitempty"` - - // If present, specifies the reason why the materialized view was not chosen - // for the query.
- RejectedReason *string `json:"rejectedReason,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.MaterializedViewDefinition -type MaterializedViewDefinition struct { - // Required. A query whose results are persisted. - Query *string `json:"query,omitempty"` - - // Output only. The time when this materialized view was last refreshed, in - // milliseconds since the epoch. - LastRefreshTime *int64 `json:"lastRefreshTime,omitempty"` - - // Optional. Enable automatic refresh of the materialized view when the base - // table is updated. The default value is "true". - EnableRefresh *bool `json:"enableRefresh,omitempty"` - - // Optional. The maximum frequency at which this materialized view will be - // refreshed. The default value is "1800000" (30 minutes). - RefreshIntervalMs *uint64 `json:"refreshIntervalMs,omitempty"` - - // Optional. This option declares the intention to construct a materialized - // view that isn't refreshed incrementally. - AllowNonIncrementalDefinition *bool `json:"allowNonIncrementalDefinition,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.MaterializedViewStatistics -type MaterializedViewStatistics struct { - // Materialized views considered for the query job. Only certain materialized - // views are used. For a detailed list, see the child message. - // - // If many materialized views are considered, then the list might be - // incomplete. - MaterializedView []MaterializedView `json:"materializedView,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.MaterializedViewStatus -type MaterializedViewStatus struct { - // Output only. Refresh watermark of materialized view. The base tables' data - // were collected into the materialized view cache until this time. - RefreshWatermark *string `json:"refreshWatermark,omitempty"` - - // Output only. Error result of the last automatic refresh. If present, - // indicates that the last automatic refresh was unsuccessful. - LastRefreshStatus *ErrorProto `json:"lastRefreshStatus,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.MetadataCacheStatistics -type MetadataCacheStatistics struct { - // Set for the Metadata caching eligible tables referenced in the query. - TableMetadataCacheUsage []TableMetadataCacheUsage `json:"tableMetadataCacheUsage,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.MlStatistics -type MlStatistics struct { - // Output only. Maximum number of iterations specified as max_iterations in - // the 'CREATE MODEL' query. The actual number of iterations may be less than - // this number due to early stop. - MaxIterations *int64 `json:"maxIterations,omitempty"` - - // Results for all completed iterations. - // Empty for [hyperparameter tuning - // jobs](https://cloud.google.com/bigquery-ml/docs/reference/standard-sql/bigqueryml-syntax-hp-tuning-overview). - IterationResults []Model_TrainingRun_IterationResult `json:"iterationResults,omitempty"` - - // Output only. The type of the model that is being trained. - ModelType *string `json:"modelType,omitempty"` - - // Output only. Training type of the job. - TrainingType *string `json:"trainingType,omitempty"` - - // Output only. Trials of a [hyperparameter tuning - // job](https://cloud.google.com/bigquery-ml/docs/reference/standard-sql/bigqueryml-syntax-hp-tuning-overview) - // sorted by trial_id. - HparamTrials []Model_HparamTuningTrial `json:"hparamTrials,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.Model -type Model struct { - // Output only. A hash of this resource. 
- Etag *string `json:"etag,omitempty"` - - // Required. Unique identifier for this model. - ModelReference *ModelReference `json:"modelReference,omitempty"` - - // Output only. The time when this model was created, in millisecs since the - // epoch. - CreationTime *int64 `json:"creationTime,omitempty"` - - // Output only. The time when this model was last modified, in millisecs since - // the epoch. - LastModifiedTime *int64 `json:"lastModifiedTime,omitempty"` - - // Optional. A user-friendly description of this model. - Description *string `json:"description,omitempty"` - - // Optional. A descriptive name for this model. - FriendlyName *string `json:"friendlyName,omitempty"` - - // The labels associated with this model. You can use these to organize - // and group your models. Label keys and values can be no longer - // than 63 characters, can only contain lowercase letters, numeric - // characters, underscores and dashes. International characters are allowed. - // Label values are optional. Label keys must start with a letter and each - // label in the list must have a different key. - Labels map[string]string `json:"labels,omitempty"` - - // Optional. The time when this model expires, in milliseconds since the - // epoch. If not present, the model will persist indefinitely. Expired models - // will be deleted and their storage reclaimed. The defaultTableExpirationMs - // property of the encapsulating dataset can be used to set a default - // expirationTime on newly created models. - ExpirationTime *int64 `json:"expirationTime,omitempty"` - - // Output only. The geographic location where the model resides. This value - // is inherited from the dataset. - Location *string `json:"location,omitempty"` - - // Custom encryption configuration (e.g., Cloud KMS keys). This shows the - // encryption configuration of the model data while stored in BigQuery - // storage. This field can be used with PatchModel to update encryption key - // for an already encrypted model. - EncryptionConfiguration *EncryptionConfiguration `json:"encryptionConfiguration,omitempty"` - - // Output only. Type of the model resource. - ModelType *string `json:"modelType,omitempty"` - - // Information for all training runs in increasing order of start_time. - TrainingRuns []Model_TrainingRun `json:"trainingRuns,omitempty"` - - // Output only. Input feature columns for the model inference. If the model is - // trained with TRANSFORM clause, these are the input of the TRANSFORM clause. - FeatureColumns []StandardSqlField `json:"featureColumns,omitempty"` - - // Output only. Label columns that were used to train this model. - // The output of the model will have a "predicted_" prefix to these columns. - LabelColumns []StandardSqlField `json:"labelColumns,omitempty"` - - // Output only. This field will be populated if a TRANSFORM clause was used to - // train a model. TRANSFORM clause (if used) takes feature_columns as input - // and outputs transform_columns. transform_columns then are used to train the - // model. - TransformColumns []TransformColumn `json:"transformColumns,omitempty"` - - // Output only. All hyperparameter search spaces in this model. - HparamSearchSpaces *Model_HparamSearchSpaces `json:"hparamSearchSpaces,omitempty"` - - // Output only. The default trial_id to use in TVFs when the trial_id is not - // passed in. For single-objective [hyperparameter - // tuning](https://cloud.google.com/bigquery-ml/docs/reference/standard-sql/bigqueryml-syntax-hp-tuning-overview) - // models, this is the best trial ID. 
For multi-objective [hyperparameter - // tuning](https://cloud.google.com/bigquery-ml/docs/reference/standard-sql/bigqueryml-syntax-hp-tuning-overview) - // models, this is the smallest trial ID among all Pareto optimal trials. - DefaultTrialID *int64 `json:"defaultTrialID,omitempty"` - - // Output only. Trials of a [hyperparameter - // tuning](https://cloud.google.com/bigquery-ml/docs/reference/standard-sql/bigqueryml-syntax-hp-tuning-overview) - // model sorted by trial_id. - HparamTrials []Model_HparamTuningTrial `json:"hparamTrials,omitempty"` - - // Output only. For single-objective [hyperparameter - // tuning](https://cloud.google.com/bigquery-ml/docs/reference/standard-sql/bigqueryml-syntax-hp-tuning-overview) - // models, it only contains the best trial. For multi-objective - // [hyperparameter - // tuning](https://cloud.google.com/bigquery-ml/docs/reference/standard-sql/bigqueryml-syntax-hp-tuning-overview) - // models, it contains all Pareto optimal trials sorted by trial_id. - OptimalTrialIds []int64 `json:"optimalTrialIds,omitempty"` - - // Output only. Remote model info - RemoteModelInfo *RemoteModelInfo `json:"remoteModelInfo,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.Model.AggregateClassificationMetrics -type Model_AggregateClassificationMetrics struct { - // Precision is the fraction of actual positive predictions that had - // positive actual labels. For multiclass this is a macro-averaged - // metric treating each class as a binary classifier. - Precision *float64 `json:"precision,omitempty"` - - // Recall is the fraction of actual positive labels that were given a - // positive prediction. For multiclass this is a macro-averaged metric. - Recall *float64 `json:"recall,omitempty"` - - // Accuracy is the fraction of predictions given the correct label. For - // multiclass this is a micro-averaged metric. - Accuracy *float64 `json:"accuracy,omitempty"` - - // Threshold at which the metrics are computed. For binary - // classification models this is the positive class threshold. - // For multi-class classification models this is the confidence - // threshold. - Threshold *float64 `json:"threshold,omitempty"` - - // The F1 score is an average of recall and precision. For multiclass - // this is a macro-averaged metric. - F1Score *float64 `json:"f1Score,omitempty"` - - // Logarithmic Loss. For multiclass this is a macro-averaged metric. - LogLoss *float64 `json:"logLoss,omitempty"` - - // Area Under a ROC Curve. For multiclass this is a macro-averaged - // metric. - RocAuc *float64 `json:"rocAuc,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.Model.ArimaFittingMetrics -type Model_ArimaFittingMetrics struct { - // Log-likelihood. - LogLikelihood *float64 `json:"logLikelihood,omitempty"` - - // AIC. - Aic *float64 `json:"aic,omitempty"` - - // Variance. - Variance *float64 `json:"variance,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.Model.ArimaForecastingMetrics -type Model_ArimaForecastingMetrics struct { - // Repeated as there can be many metric sets (one for each model) in - // auto-arima and the large-scale case. - ArimaSingleModelForecastingMetrics []Model_ArimaForecastingMetrics_ArimaSingleModelForecastingMetrics `json:"arimaSingleModelForecastingMetrics,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.Model.ArimaForecastingMetrics.ArimaSingleModelForecastingMetrics -type Model_ArimaForecastingMetrics_ArimaSingleModelForecastingMetrics struct { - // Non-seasonal order.
- NonSeasonalOrder *Model_ArimaOrder `json:"nonSeasonalOrder,omitempty"` - - // Arima fitting metrics. - ArimaFittingMetrics *Model_ArimaFittingMetrics `json:"arimaFittingMetrics,omitempty"` - - // Is arima model fitted with drift or not. It is always false when d - // is not 1. - HasDrift *bool `json:"hasDrift,omitempty"` - - // The time_series_id value for this time series. It will be one of - // the unique values from the time_series_id_column specified during - // ARIMA model training. Only present when time_series_id_column - // training option was used. - TimeSeriesID *string `json:"timeSeriesID,omitempty"` - - // The tuple of time_series_ids identifying this time series. It will - // be one of the unique tuples of values present in the - // time_series_id_columns specified during ARIMA model training. Only - // present when time_series_id_columns training option was used and - // the order of values here are same as the order of - // time_series_id_columns. - TimeSeriesIds []string `json:"timeSeriesIds,omitempty"` - - // Seasonal periods. Repeated because multiple periods are supported - // for one time series. - SeasonalPeriods []string `json:"seasonalPeriods,omitempty"` - - // If true, holiday_effect is a part of time series decomposition result. - HasHolidayEffect *bool `json:"hasHolidayEffect,omitempty"` - - // If true, spikes_and_dips is a part of time series decomposition result. - HasSpikesAndDips *bool `json:"hasSpikesAndDips,omitempty"` - - // If true, step_changes is a part of time series decomposition result. - HasStepChanges *bool `json:"hasStepChanges,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.Model.ArimaOrder -type Model_ArimaOrder struct { - // Order of the autoregressive part. - P *int64 `json:"p,omitempty"` - - // Order of the differencing part. - D *int64 `json:"d,omitempty"` - - // Order of the moving-average part. - Q *int64 `json:"q,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.Model.BinaryClassificationMetrics -type Model_BinaryClassificationMetrics struct { - // Aggregate classification metrics. - AggregateClassificationMetrics *Model_AggregateClassificationMetrics `json:"aggregateClassificationMetrics,omitempty"` - - // Binary confusion matrix at multiple thresholds. - BinaryConfusionMatrixList []Model_BinaryClassificationMetrics_BinaryConfusionMatrix `json:"binaryConfusionMatrixList,omitempty"` - - // Label representing the positive class. - PositiveLabel *string `json:"positiveLabel,omitempty"` - - // Label representing the negative class. - NegativeLabel *string `json:"negativeLabel,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.Model.BinaryClassificationMetrics.BinaryConfusionMatrix -type Model_BinaryClassificationMetrics_BinaryConfusionMatrix struct { - // Threshold value used when computing each of the following metric. - PositiveClassThreshold *float64 `json:"positiveClassThreshold,omitempty"` - - // Number of true samples predicted as true. - TruePositives *int64 `json:"truePositives,omitempty"` - - // Number of false samples predicted as true. - FalsePositives *int64 `json:"falsePositives,omitempty"` - - // Number of true samples predicted as false. - TrueNegatives *int64 `json:"trueNegatives,omitempty"` - - // Number of false samples predicted as false. - FalseNegatives *int64 `json:"falseNegatives,omitempty"` - - // The fraction of actual positive predictions that had positive actual - // labels. 
- Precision *float64 `json:"precision,omitempty"` - - // The fraction of actual positive labels that were given a positive - // prediction. - Recall *float64 `json:"recall,omitempty"` - - // The equally weighted average of recall and precision. - F1Score *float64 `json:"f1Score,omitempty"` - - // The fraction of predictions given the correct label. - Accuracy *float64 `json:"accuracy,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.Model.BoostedTreeOptionEnums -type Model_BoostedTreeOptionEnums struct { -} - -// +kcc:proto=google.cloud.bigquery.v2.Model.CategoryEncodingMethod -type Model_CategoryEncodingMethod struct { -} - -// +kcc:proto=google.cloud.bigquery.v2.Model.ClusteringMetrics -type Model_ClusteringMetrics struct { - // Davies-Bouldin index. - DaviesBouldinIndex *float64 `json:"daviesBouldinIndex,omitempty"` - - // Mean of squared distances between each sample to its cluster centroid. - MeanSquaredDistance *float64 `json:"meanSquaredDistance,omitempty"` - - // Information for all clusters. - Clusters []Model_ClusteringMetrics_Cluster `json:"clusters,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.Model.ClusteringMetrics.Cluster -type Model_ClusteringMetrics_Cluster struct { - // Centroid id. - CentroidID *int64 `json:"centroidID,omitempty"` - - // Values of highly variant features for this cluster. - FeatureValues []Model_ClusteringMetrics_Cluster_FeatureValue `json:"featureValues,omitempty"` - - // Count of training data rows that were assigned to this cluster. - Count *int64 `json:"count,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.Model.ClusteringMetrics.Cluster.FeatureValue -type Model_ClusteringMetrics_Cluster_FeatureValue struct { - // The feature column name. - FeatureColumn *string `json:"featureColumn,omitempty"` - - // The numerical feature value. This is the centroid value for this - // feature. - NumericalValue *float64 `json:"numericalValue,omitempty"` - - // The categorical feature value. - CategoricalValue *Model_ClusteringMetrics_Cluster_FeatureValue_CategoricalValue `json:"categoricalValue,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.Model.ClusteringMetrics.Cluster.FeatureValue.CategoricalValue -type Model_ClusteringMetrics_Cluster_FeatureValue_CategoricalValue struct { - // Counts of all categories for the categorical feature. If there are - // more than ten categories, we return top ten (by count) and return - // one more CategoryCount with category "_OTHER_" and count as - // aggregate counts of remaining categories. - CategoryCounts []Model_ClusteringMetrics_Cluster_FeatureValue_CategoricalValue_CategoryCount `json:"categoryCounts,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.Model.ClusteringMetrics.Cluster.FeatureValue.CategoricalValue.CategoryCount -type Model_ClusteringMetrics_Cluster_FeatureValue_CategoricalValue_CategoryCount struct { - // The name of category. - Category *string `json:"category,omitempty"` - - // The count of training samples matching the category within the - // cluster. - Count *int64 `json:"count,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.Model.DataSplitResult -type Model_DataSplitResult struct { - // Table reference of the training data after split. - TrainingTable *TableReference `json:"trainingTable,omitempty"` - - // Table reference of the evaluation data after split. - EvaluationTable *TableReference `json:"evaluationTable,omitempty"` - - // Table reference of the test data after split. 
- TestTable *TableReference `json:"testTable,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.Model.DimensionalityReductionMetrics -type Model_DimensionalityReductionMetrics struct { - // Total percentage of variance explained by the selected principal - // components. - TotalExplainedVarianceRatio *float64 `json:"totalExplainedVarianceRatio,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.Model.float64HparamSearchSpace -type Model_float64HparamSearchSpace struct { - // Range of the float64 hyperparameter. - Range *Model_float64HparamSearchSpace_float64Range `json:"range,omitempty"` - - // Candidates of the float64 hyperparameter. - Candidates *Model_float64HparamSearchSpace_float64Candidates `json:"candidates,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.Model.float64HparamSearchSpace.float64Candidates -type Model_float64HparamSearchSpace_float64Candidates struct { - // Candidates for the float64 parameter in increasing order. - Candidates []float64 `json:"candidates,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.Model.float64HparamSearchSpace.float64Range -type Model_float64HparamSearchSpace_float64Range struct { - // Min value of the float64 parameter. - Min *float64 `json:"min,omitempty"` - - // Max value of the float64 parameter. - Max *float64 `json:"max,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.Model.EvaluationMetrics -type Model_EvaluationMetrics struct { - // Populated for regression models and explicit feedback type matrix - // factorization models. - RegressionMetrics *Model_RegressionMetrics `json:"regressionMetrics,omitempty"` - - // Populated for binary classification/classifier models. - BinaryClassificationMetrics *Model_BinaryClassificationMetrics `json:"binaryClassificationMetrics,omitempty"` - - // Populated for multi-class classification/classifier models. - MultiClassClassificationMetrics *Model_MultiClassClassificationMetrics `json:"multiClassClassificationMetrics,omitempty"` - - // Populated for clustering models. - ClusteringMetrics *Model_ClusteringMetrics `json:"clusteringMetrics,omitempty"` - - // Populated for implicit feedback type matrix factorization models. - RankingMetrics *Model_RankingMetrics `json:"rankingMetrics,omitempty"` - - // Populated for ARIMA models. - ArimaForecastingMetrics *Model_ArimaForecastingMetrics `json:"arimaForecastingMetrics,omitempty"` - - // Evaluation metrics when the model is a dimensionality reduction model, - // which currently includes PCA. - DimensionalityReductionMetrics *Model_DimensionalityReductionMetrics `json:"dimensionalityReductionMetrics,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.Model.GlobalExplanation -type Model_GlobalExplanation struct { - // A list of the top global explanations. Sorted by absolute value of - // attribution in descending order. - Explanations []Model_GlobalExplanation_Explanation `json:"explanations,omitempty"` - - // Class label for this set of global explanations. Will be empty/null for - // binary logistic and linear regression models. Sorted alphabetically in - // descending order. - ClassLabel *string `json:"classLabel,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.Model.GlobalExplanation.Explanation -type Model_GlobalExplanation_Explanation struct { - // The full feature name. For non-numerical features, will be formatted - // like `.`. Overall size of feature - // name will always be truncated to first 120 characters. - FeatureName *string `json:"featureName,omitempty"` - - // Attribution of feature. 
- Attribution *float64 `json:"attribution,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.Model.HparamSearchSpaces -type Model_HparamSearchSpaces struct { - // Learning rate of training jobs. - LearnRate *Model_float64HparamSearchSpace `json:"learnRate,omitempty"` - - // L1 regularization coefficient. - L1Reg *Model_float64HparamSearchSpace `json:"l1Reg,omitempty"` - - // L2 regularization coefficient. - L2Reg *Model_float64HparamSearchSpace `json:"l2Reg,omitempty"` - - // Number of clusters for k-means. - NumClusters *Model_IntHparamSearchSpace `json:"numClusters,omitempty"` - - // Number of latent factors to train on. - NumFactors *Model_IntHparamSearchSpace `json:"numFactors,omitempty"` - - // Hidden units for neural network models. - HiddenUnits *Model_IntArrayHparamSearchSpace `json:"hiddenUnits,omitempty"` - - // Mini batch sample size. - BatchSize *Model_IntHparamSearchSpace `json:"batchSize,omitempty"` - - // Dropout probability for dnn model training and boosted tree models - // using dart booster. - Dropout *Model_float64HparamSearchSpace `json:"dropout,omitempty"` - - // Maximum depth of a tree for boosted tree models. - MaxTreeDepth *Model_IntHparamSearchSpace `json:"maxTreeDepth,omitempty"` - - // Subsample the training data to grow tree to prevent overfitting for - // boosted tree models. - Subsample *Model_float64HparamSearchSpace `json:"subsample,omitempty"` - - // Minimum split loss for boosted tree models. - MinSplitLoss *Model_float64HparamSearchSpace `json:"minSplitLoss,omitempty"` - - // Hyperparameter for matrix factorization when implicit feedback type is - // specified. - WalsAlpha *Model_float64HparamSearchSpace `json:"walsAlpha,omitempty"` - - // Booster type for boosted tree models. - BoosterType *Model_StringHparamSearchSpace `json:"boosterType,omitempty"` - - // Number of parallel trees for boosted tree models. - NumParallelTree *Model_IntHparamSearchSpace `json:"numParallelTree,omitempty"` - - // Dart normalization type for boosted tree models. - DartNormalizeType *Model_StringHparamSearchSpace `json:"dartNormalizeType,omitempty"` - - // Tree construction algorithm for boosted tree models. - TreeMethod *Model_StringHparamSearchSpace `json:"treeMethod,omitempty"` - - // Minimum sum of instance weight needed in a child for boosted tree models. - MinTreeChildWeight *Model_IntHparamSearchSpace `json:"minTreeChildWeight,omitempty"` - - // Subsample ratio of columns when constructing each tree for boosted tree - // models. - ColsampleBytree *Model_float64HparamSearchSpace `json:"colsampleBytree,omitempty"` - - // Subsample ratio of columns for each level for boosted tree models. - ColsampleBylevel *Model_float64HparamSearchSpace `json:"colsampleBylevel,omitempty"` - - // Subsample ratio of columns for each node(split) for boosted tree models. - ColsampleBynode *Model_float64HparamSearchSpace `json:"colsampleBynode,omitempty"` - - // Activation functions of neural network models. - ActivationFn *Model_StringHparamSearchSpace `json:"activationFn,omitempty"` - - // Optimizer of TF models. - Optimizer *Model_StringHparamSearchSpace `json:"optimizer,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.Model.HparamTuningEnums -type Model_HparamTuningEnums struct { -} - -// +kcc:proto=google.cloud.bigquery.v2.Model.HparamTuningTrial -type Model_HparamTuningTrial struct { - // 1-based index of the trial. - TrialID *int64 `json:"trialID,omitempty"` - - // Starting time of the trial.
- StartTimeMs *int64 `json:"startTimeMs,omitempty"` - - // Ending time of the trial. - EndTimeMs *int64 `json:"endTimeMs,omitempty"` - - // The hyperparameters selected for this trial. - Hparams *Model_TrainingRun_TrainingOptions `json:"hparams,omitempty"` - - // Evaluation metrics of this trial calculated on the test data. - // Empty in Job API. - EvaluationMetrics *Model_EvaluationMetrics `json:"evaluationMetrics,omitempty"` - - // The status of the trial. - Status *string `json:"status,omitempty"` - - // Error message for FAILED and INFEASIBLE trial. - ErrorMessage *string `json:"errorMessage,omitempty"` - - // Loss computed on the training data at the end of trial. - TrainingLoss *float64 `json:"trainingLoss,omitempty"` - - // Loss computed on the eval data at the end of trial. - EvalLoss *float64 `json:"evalLoss,omitempty"` - - // Hyperparameter tuning evaluation metrics of this trial calculated on the - // eval data. Unlike evaluation_metrics, only the fields corresponding to - // the hparam_tuning_objectives are set. - HparamTuningEvaluationMetrics *Model_EvaluationMetrics `json:"hparamTuningEvaluationMetrics,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.Model.IntArrayHparamSearchSpace -type Model_IntArrayHparamSearchSpace struct { - // Candidates for the int array parameter. - Candidates []Model_IntArrayHparamSearchSpace_IntArray `json:"candidates,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.Model.IntArrayHparamSearchSpace.IntArray -type Model_IntArrayHparamSearchSpace_IntArray struct { - // Elements in the int array. - Elements []int64 `json:"elements,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.Model.IntHparamSearchSpace -type Model_IntHparamSearchSpace struct { - // Range of the int hyperparameter. - Range *Model_IntHparamSearchSpace_IntRange `json:"range,omitempty"` - - // Candidates of the int hyperparameter. - Candidates *Model_IntHparamSearchSpace_IntCandidates `json:"candidates,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.Model.IntHparamSearchSpace.IntCandidates -type Model_IntHparamSearchSpace_IntCandidates struct { - // Candidates for the int parameter in increasing order. - Candidates []int64 `json:"candidates,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.Model.IntHparamSearchSpace.IntRange -type Model_IntHparamSearchSpace_IntRange struct { - // Min value of the int parameter. - Min *int64 `json:"min,omitempty"` - - // Max value of the int parameter. - Max *int64 `json:"max,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.Model.KmeansEnums -type Model_KmeansEnums struct { -} - -// +kcc:proto=google.cloud.bigquery.v2.Model.ModelRegistryOptionEnums -type Model_ModelRegistryOptionEnums struct { -} - -// +kcc:proto=google.cloud.bigquery.v2.Model.MultiClassClassificationMetrics -type Model_MultiClassClassificationMetrics struct { - // Aggregate classification metrics. - AggregateClassificationMetrics *Model_AggregateClassificationMetrics `json:"aggregateClassificationMetrics,omitempty"` - - // Confusion matrix at different thresholds. - ConfusionMatrixList []Model_MultiClassClassificationMetrics_ConfusionMatrix `json:"confusionMatrixList,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.Model.MultiClassClassificationMetrics.ConfusionMatrix -type Model_MultiClassClassificationMetrics_ConfusionMatrix struct { - // Confidence threshold used when computing the entries of the - // confusion matrix. - ConfidenceThreshold *float64 `json:"confidenceThreshold,omitempty"` - - // One row per actual label.
- Rows []Model_MultiClassClassificationMetrics_ConfusionMatrix_Row `json:"rows,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.Model.MultiClassClassificationMetrics.ConfusionMatrix.Entry -type Model_MultiClassClassificationMetrics_ConfusionMatrix_Entry struct { - // The predicted label. For confidence_threshold > 0, we will - // also add an entry indicating the number of items under the - // confidence threshold. - PredictedLabel *string `json:"predictedLabel,omitempty"` - - // Number of items being predicted as this label. - ItemCount *int64 `json:"itemCount,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.Model.MultiClassClassificationMetrics.ConfusionMatrix.Row -type Model_MultiClassClassificationMetrics_ConfusionMatrix_Row struct { - // The original label of this row. - ActualLabel *string `json:"actualLabel,omitempty"` - - // Info describing predicted label distribution. - Entries []Model_MultiClassClassificationMetrics_ConfusionMatrix_Entry `json:"entries,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.Model.PcaSolverOptionEnums -type Model_PcaSolverOptionEnums struct { -} - -// +kcc:proto=google.cloud.bigquery.v2.Model.RankingMetrics -type Model_RankingMetrics struct { - // Calculates a precision per user for all the items by ranking them and - // then averages all the precisions across all the users. - MeanAveragePrecision *float64 `json:"meanAveragePrecision,omitempty"` - - // Similar to the mean squared error computed in regression and explicit - // recommendation models except instead of computing the rating directly, - // the output from evaluate is computed against a preference which is 1 or 0 - // depending on if the rating exists or not. - MeanSquaredError *float64 `json:"meanSquaredError,omitempty"` - - // A metric to determine the goodness of a ranking calculated from the - // predicted confidence by comparing it to an ideal rank measured by the - // original ratings. - NormalizedDiscountedCumulativeGain *float64 `json:"normalizedDiscountedCumulativeGain,omitempty"` - - // Determines the goodness of a ranking by computing the percentile rank - // from the predicted confidence and dividing it by the original rank. - AverageRank *float64 `json:"averageRank,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.Model.RegressionMetrics -type Model_RegressionMetrics struct { - // Mean absolute error. - MeanAbsoluteError *float64 `json:"meanAbsoluteError,omitempty"` - - // Mean squared error. - MeanSquaredError *float64 `json:"meanSquaredError,omitempty"` - - // Mean squared log error. - MeanSquaredLogError *float64 `json:"meanSquaredLogError,omitempty"` - - // Median absolute error. - MedianAbsoluteError *float64 `json:"medianAbsoluteError,omitempty"` - - // R^2 score. This corresponds to r2_score in ML.EVALUATE. - RSquared *float64 `json:"rSquared,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.Model.SeasonalPeriod -type Model_SeasonalPeriod struct { -} - -// +kcc:proto=google.cloud.bigquery.v2.Model.StringHparamSearchSpace -type Model_StringHparamSearchSpace struct { - // Candidates for the string or enum parameter in lower case. - Candidates []string `json:"candidates,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.Model.TrainingRun -type Model_TrainingRun struct { - // Output only. Options that were used for this training run, including - // user-specified and default options that were used. - TrainingOptions *Model_TrainingRun_TrainingOptions `json:"trainingOptions,omitempty"` - - // Output only.
The start time of this training run. - StartTime *string `json:"startTime,omitempty"` - - // Output only. Output of each iteration run, results.size() <= - // max_iterations. - Results []Model_TrainingRun_IterationResult `json:"results,omitempty"` - - // Output only. The evaluation metrics over training/eval data that were - // computed at the end of training. - EvaluationMetrics *Model_EvaluationMetrics `json:"evaluationMetrics,omitempty"` - - // Output only. Data split result of the training run. Only set when the - // input data is actually split. - DataSplitResult *Model_DataSplitResult `json:"dataSplitResult,omitempty"` - - // Output only. Global explanation contains the explanation of top features - // on the model level. Applies to both regression and classification models. - ModelLevelGlobalExplanation *Model_GlobalExplanation `json:"modelLevelGlobalExplanation,omitempty"` - - // Output only. Global explanation contains the explanation of top features - // on the class level. Applies to classification models only. - ClassLevelGlobalExplanations []Model_GlobalExplanation `json:"classLevelGlobalExplanations,omitempty"` - - // The model id in the [Vertex AI Model - // Registry](https://cloud.google.com/vertex-ai/docs/model-registry/introduction) - // for this training run. - VertexAiModelID *string `json:"vertexAiModelID,omitempty"` - - // Output only. The model version in the [Vertex AI Model - // Registry](https://cloud.google.com/vertex-ai/docs/model-registry/introduction) - // for this training run. - VertexAiModelVersion *string `json:"vertexAiModelVersion,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.Model.TrainingRun.IterationResult -type Model_TrainingRun_IterationResult struct { - // Index of the iteration, 0 based. - Index *int32 `json:"index,omitempty"` - - // Time taken to run the iteration in milliseconds. - DurationMs *int64 `json:"durationMs,omitempty"` - - // Loss computed on the training data at the end of iteration. - TrainingLoss *float64 `json:"trainingLoss,omitempty"` - - // Loss computed on the eval data at the end of iteration. - EvalLoss *float64 `json:"evalLoss,omitempty"` - - // Learn rate used for this iteration. - LearnRate *float64 `json:"learnRate,omitempty"` - - // Information about top clusters for clustering models. - ClusterInfos []Model_TrainingRun_IterationResult_ClusterInfo `json:"clusterInfos,omitempty"` - - // Arima result. - ArimaResult *Model_TrainingRun_IterationResult_ArimaResult `json:"arimaResult,omitempty"` - - // The information of the principal components. - PrincipalComponentInfos []Model_TrainingRun_IterationResult_PrincipalComponentInfo `json:"principalComponentInfos,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.Model.TrainingRun.IterationResult.ArimaResult -type Model_TrainingRun_IterationResult_ArimaResult struct { - // This message is repeated because there are multiple arima models - // fitted in auto-arima. For non-auto-arima model, its size is one. - ArimaModelInfo []Model_TrainingRun_IterationResult_ArimaResult_ArimaModelInfo `json:"arimaModelInfo,omitempty"` - - // Seasonal periods. Repeated because multiple periods are supported for - // one time series. - SeasonalPeriods []string `json:"seasonalPeriods,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.Model.TrainingRun.IterationResult.ArimaResult.ArimaCoefficients -type Model_TrainingRun_IterationResult_ArimaResult_ArimaCoefficients struct { - // Auto-regressive coefficients, an array of float64. 
- AutoRegressiveCoefficients []float64 `json:"autoRegressiveCoefficients,omitempty"` - - // Moving-average coefficients, an array of float64. - MovingAverageCoefficients []float64 `json:"movingAverageCoefficients,omitempty"` - - // Intercept coefficient, just a float64 not an array. - InterceptCoefficient *float64 `json:"interceptCoefficient,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.Model.TrainingRun.IterationResult.ArimaResult.ArimaModelInfo -type Model_TrainingRun_IterationResult_ArimaResult_ArimaModelInfo struct { - // Non-seasonal order. - NonSeasonalOrder *Model_ArimaOrder `json:"nonSeasonalOrder,omitempty"` - - // Arima coefficients. - ArimaCoefficients *Model_TrainingRun_IterationResult_ArimaResult_ArimaCoefficients `json:"arimaCoefficients,omitempty"` - - // Arima fitting metrics. - ArimaFittingMetrics *Model_ArimaFittingMetrics `json:"arimaFittingMetrics,omitempty"` - - // Whether Arima model fitted with drift or not. It is always false - // when d is not 1. - HasDrift *bool `json:"hasDrift,omitempty"` - - // The time_series_id value for this time series. It will be one of - // the unique values from the time_series_id_column specified during - // ARIMA model training. Only present when time_series_id_column - // training option was used. - TimeSeriesID *string `json:"timeSeriesID,omitempty"` - - // The tuple of time_series_ids identifying this time series. It will - // be one of the unique tuples of values present in the - // time_series_id_columns specified during ARIMA model training. Only - // present when time_series_id_columns training option was used and - // the order of values here are same as the order of - // time_series_id_columns. - TimeSeriesIds []string `json:"timeSeriesIds,omitempty"` - - // Seasonal periods. Repeated because multiple periods are supported - // for one time series. - SeasonalPeriods []string `json:"seasonalPeriods,omitempty"` - - // If true, holiday_effect is a part of time series decomposition - // result. - HasHolidayEffect *bool `json:"hasHolidayEffect,omitempty"` - - // If true, spikes_and_dips is a part of time series decomposition - // result. - HasSpikesAndDips *bool `json:"hasSpikesAndDips,omitempty"` - - // If true, step_changes is a part of time series decomposition - // result. - HasStepChanges *bool `json:"hasStepChanges,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.Model.TrainingRun.IterationResult.ClusterInfo -type Model_TrainingRun_IterationResult_ClusterInfo struct { - // Centroid id. - CentroidID *int64 `json:"centroidID,omitempty"` - - // Cluster radius, the average distance from centroid - // to each point assigned to the cluster. - ClusterRadius *float64 `json:"clusterRadius,omitempty"` - - // Cluster size, the total number of points assigned to the cluster. - ClusterSize *int64 `json:"clusterSize,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.Model.TrainingRun.IterationResult.PrincipalComponentInfo -type Model_TrainingRun_IterationResult_PrincipalComponentInfo struct { - // Id of the principal component. - PrincipalComponentID *int64 `json:"principalComponentID,omitempty"` - - // Explained variance by this principal component, which is simply the - // eigenvalue. - ExplainedVariance *float64 `json:"explainedVariance,omitempty"` - - // Explained_variance over the total explained variance. - ExplainedVarianceRatio *float64 `json:"explainedVarianceRatio,omitempty"` - - // The explained_variance is pre-ordered in the descending order to - // compute the cumulative explained variance ratio. 
- CumulativeExplainedVarianceRatio *float64 `json:"cumulativeExplainedVarianceRatio,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.Model.TrainingRun.TrainingOptions -type Model_TrainingRun_TrainingOptions struct { - // The maximum number of iterations in training. Used only for iterative - // training algorithms. - MaxIterations *int64 `json:"maxIterations,omitempty"` - - // Type of loss function used during training run. - LossType *string `json:"lossType,omitempty"` - - // Learning rate in training. Used only for iterative training algorithms. - LearnRate *float64 `json:"learnRate,omitempty"` - - // L1 regularization coefficient. - L1Regularization *float64 `json:"l1Regularization,omitempty"` - - // L2 regularization coefficient. - L2Regularization *float64 `json:"l2Regularization,omitempty"` - - // When early_stop is true, stops training when accuracy improvement is - // less than 'min_relative_progress'. Used only for iterative training - // algorithms. - MinRelativeProgress *float64 `json:"minRelativeProgress,omitempty"` - - // Whether to train a model from the last checkpoint. - WarmStart *bool `json:"warmStart,omitempty"` - - // Whether to stop early when the loss doesn't improve significantly - // any more (compared to min_relative_progress). Used only for iterative - // training algorithms. - EarlyStop *bool `json:"earlyStop,omitempty"` - - // Name of input label columns in training data. - InputLabelColumns []string `json:"inputLabelColumns,omitempty"` - - // The data split type for training and evaluation, e.g. RANDOM. - DataSplitMethod *string `json:"dataSplitMethod,omitempty"` - - // The fraction of evaluation data over the whole input data. The rest - // of data will be used as training data. The format should be float64. - // Accurate to two decimal places. - // Default value is 0.2. - DataSplitEvalFraction *float64 `json:"dataSplitEvalFraction,omitempty"` - - // The column to split data with. This column won't be used as a - // feature. - // 1. When data_split_method is CUSTOM, the corresponding column should - // be boolean. The rows with true value tag are eval data, and the false - // are training data. - // 2. When data_split_method is SEQ, the first DATA_SPLIT_EVAL_FRACTION - // rows (from smallest to largest) in the corresponding column are used - // as training data, and the rest are eval data. It respects the order - // in Orderable data types: - // https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#data-type-properties - DataSplitColumn *string `json:"dataSplitColumn,omitempty"` - - // The strategy to determine learn rate for the current iteration. - LearnRateStrategy *string `json:"learnRateStrategy,omitempty"` - - // Specifies the initial learning rate for the line search learn rate - // strategy. - InitialLearnRate *float64 `json:"initialLearnRate,omitempty"` - - // TODO: map type string float64 for label_class_weights - - // User column specified for matrix factorization models. - UserColumn *string `json:"userColumn,omitempty"` - - // Item column specified for matrix factorization models. - ItemColumn *string `json:"itemColumn,omitempty"` - - // Distance type for clustering models. - DistanceType *string `json:"distanceType,omitempty"` - - // Number of clusters for clustering models. - NumClusters *int64 `json:"numClusters,omitempty"` - - // Google Cloud Storage URI from which the model was imported. Only - // applicable for imported models. 
- ModelUri *string `json:"modelUri,omitempty"` - - // Optimization strategy for training linear regression models. - OptimizationStrategy *string `json:"optimizationStrategy,omitempty"` - - // Hidden units for dnn models. - HiddenUnits []int64 `json:"hiddenUnits,omitempty"` - - // Batch size for dnn models. - BatchSize *int64 `json:"batchSize,omitempty"` - - // Dropout probability for dnn models. - Dropout *float64 `json:"dropout,omitempty"` - - // Maximum depth of a tree for boosted tree models. - MaxTreeDepth *int64 `json:"maxTreeDepth,omitempty"` - - // Subsample fraction of the training data to grow tree to prevent - // overfitting for boosted tree models. - Subsample *float64 `json:"subsample,omitempty"` - - // Minimum split loss for boosted tree models. - MinSplitLoss *float64 `json:"minSplitLoss,omitempty"` - - // Booster type for boosted tree models. - BoosterType *string `json:"boosterType,omitempty"` - - // Number of parallel trees constructed during each iteration for boosted - // tree models. - NumParallelTree *int64 `json:"numParallelTree,omitempty"` - - // Type of normalization algorithm for boosted tree models using - // dart booster. - DartNormalizeType *string `json:"dartNormalizeType,omitempty"` - - // Tree construction algorithm for boosted tree models. - TreeMethod *string `json:"treeMethod,omitempty"` - - // Minimum sum of instance weight needed in a child for boosted tree - // models. - MinTreeChildWeight *int64 `json:"minTreeChildWeight,omitempty"` - - // Subsample ratio of columns when constructing each tree for boosted tree - // models. - ColsampleBytree *float64 `json:"colsampleBytree,omitempty"` - - // Subsample ratio of columns for each level for boosted tree models. - ColsampleBylevel *float64 `json:"colsampleBylevel,omitempty"` - - // Subsample ratio of columns for each node(split) for boosted tree - // models. - ColsampleBynode *float64 `json:"colsampleBynode,omitempty"` - - // Num factors specified for matrix factorization models. - NumFactors *int64 `json:"numFactors,omitempty"` - - // Feedback type that specifies which algorithm to run for matrix - // factorization. - FeedbackType *string `json:"feedbackType,omitempty"` - - // Hyperparameter for matrix factoration when implicit feedback type is - // specified. - WalsAlpha *float64 `json:"walsAlpha,omitempty"` - - // The method used to initialize the centroids for kmeans algorithm. - KmeansInitializationMethod *string `json:"kmeansInitializationMethod,omitempty"` - - // The column used to provide the initial centroids for kmeans algorithm - // when kmeans_initialization_method is CUSTOM. - KmeansInitializationColumn *string `json:"kmeansInitializationColumn,omitempty"` - - // Column to be designated as time series timestamp for ARIMA model. - TimeSeriesTimestampColumn *string `json:"timeSeriesTimestampColumn,omitempty"` - - // Column to be designated as time series data for ARIMA model. - TimeSeriesDataColumn *string `json:"timeSeriesDataColumn,omitempty"` - - // Whether to enable auto ARIMA or not. - AutoArima *bool `json:"autoArima,omitempty"` - - // A specification of the non-seasonal part of the ARIMA model: the three - // components (p, d, q) are the AR order, the degree of differencing, and - // the MA order. - NonSeasonalOrder *Model_ArimaOrder `json:"nonSeasonalOrder,omitempty"` - - // The data frequency of a time series. - DataFrequency *string `json:"dataFrequency,omitempty"` - - // Whether or not p-value test should be computed for this model. 
Only - // available for linear and logistic regression models. - CalculatePValues *bool `json:"calculatePValues,omitempty"` - - // Include drift when fitting an ARIMA model. - IncludeDrift *bool `json:"includeDrift,omitempty"` - - // The geographical region based on which the holidays are considered in - // time series modeling. If a valid value is specified, then holiday - // effects modeling is enabled. - HolidayRegion *string `json:"holidayRegion,omitempty"` - - // A list of geographical regions that are used for time series modeling. - HolidayRegions []string `json:"holidayRegions,omitempty"` - - // The time series id column that was used during ARIMA model training. - TimeSeriesIDColumn *string `json:"timeSeriesIDColumn,omitempty"` - - // The time series id columns that were used during ARIMA model training. - TimeSeriesIDColumns []string `json:"timeSeriesIDColumns,omitempty"` - - // The number of periods ahead that need to be forecasted. - Horizon *int64 `json:"horizon,omitempty"` - - // The max value of the sum of non-seasonal p and q. - AutoArimaMaxOrder *int64 `json:"autoArimaMaxOrder,omitempty"` - - // The min value of the sum of non-seasonal p and q. - AutoArimaMinOrder *int64 `json:"autoArimaMinOrder,omitempty"` - - // Number of trials to run this hyperparameter tuning job. - NumTrials *int64 `json:"numTrials,omitempty"` - - // Maximum number of trials to run in parallel. - MaxParallelTrials *int64 `json:"maxParallelTrials,omitempty"` - - // The target evaluation metrics to optimize the hyperparameters for. - HparamTuningObjectives []string `json:"hparamTuningObjectives,omitempty"` - - // If true, perform time series decomposition and save the results. - DecomposeTimeSeries *bool `json:"decomposeTimeSeries,omitempty"` - - // If true, clean spikes and dips in the input time series. - CleanSpikesAndDips *bool `json:"cleanSpikesAndDips,omitempty"` - - // If true, detect step changes and make data adjustment in the input time - // series. - AdjustStepChanges *bool `json:"adjustStepChanges,omitempty"` - - // If true, enable global explanation during training. - EnableGlobalExplain *bool `json:"enableGlobalExplain,omitempty"` - - // Number of paths for the sampled Shapley explain method. - SampledShapleyNumPaths *int64 `json:"sampledShapleyNumPaths,omitempty"` - - // Number of integral steps for the integrated gradients explain method. - IntegratedGradientsNumSteps *int64 `json:"integratedGradientsNumSteps,omitempty"` - - // Categorical feature encoding method. - CategoryEncodingMethod *string `json:"categoryEncodingMethod,omitempty"` - - // Based on the selected TF version, the corresponding docker image is - // used to train external models. - TfVersion *string `json:"tfVersion,omitempty"` - - // Enums for color space, used for processing images in Object Table. - // See more details at - // https://www.tensorflow.org/io/tutorials/colorspace. - ColorSpace *string `json:"colorSpace,omitempty"` - - // Name of the instance weight column for training data. - // This column isn't used as a feature. - InstanceWeightColumn *string `json:"instanceWeightColumn,omitempty"` - - // Smoothing window size for the trend component. When a positive value is - // specified, a center moving average smoothing is applied on the history - // trend. When the smoothing window is out of the boundary at the - // beginning or the end of the trend, the first element or the last - // element is padded to fill the smoothing window before the average is - // applied.
- TrendSmoothingWindowSize *int64 `json:"trendSmoothingWindowSize,omitempty"`
-
- // The fraction of the interpolated length of the time series that's used
- // to model the time series trend component. All of the time points of the
- // time series are used to model the non-trend component. This training
- // option accelerates modeling training without sacrificing much
- // forecasting accuracy. You can use this option with
- // `minTimeSeriesLength` but not with `maxTimeSeriesLength`.
- TimeSeriesLengthFraction *float64 `json:"timeSeriesLengthFraction,omitempty"`
-
- // The minimum number of time points in a time series that are used in
- // modeling the trend component of the time series. If you use this option
- // you must also set the `timeSeriesLengthFraction` option. This training
- // option ensures that enough time points are available when you use
- // `timeSeriesLengthFraction` in trend modeling. This is particularly
- // important when forecasting multiple time series in a single query using
- // `timeSeriesIdColumn`. If the total number of time points is less than
- // the `minTimeSeriesLength` value, then the query uses all available time
- // points.
- MinTimeSeriesLength *int64 `json:"minTimeSeriesLength,omitempty"`
-
- // The maximum number of time points in a time series that can be used in
- // modeling the trend component of the time series. Don't use this option
- // with the `timeSeriesLengthFraction` or `minTimeSeriesLength` options.
- MaxTimeSeriesLength *int64 `json:"maxTimeSeriesLength,omitempty"`
-
- // User-selected XGBoost versions for training of XGBoost models.
- XgboostVersion *string `json:"xgboostVersion,omitempty"`
-
- // Whether to use approximate feature contribution method in XGBoost model
- // explanation for global explain.
- ApproxGlobalFeatureContrib *bool `json:"approxGlobalFeatureContrib,omitempty"`
-
- // Whether the model should include intercept during model training.
- FitIntercept *bool `json:"fitIntercept,omitempty"`
-
- // Number of principal components to keep in the PCA model. Must be <= the
- // number of features.
- NumPrincipalComponents *int64 `json:"numPrincipalComponents,omitempty"`
-
- // The minimum ratio of cumulative explained variance that needs to be
- // given by the PCA model.
- PcaExplainedVarianceRatio *float64 `json:"pcaExplainedVarianceRatio,omitempty"`
-
- // If true, scale the feature values by dividing by the feature standard
- // deviation. Currently only applies to PCA.
- ScaleFeatures *bool `json:"scaleFeatures,omitempty"`
-
- // The solver for PCA.
- PcaSolver *string `json:"pcaSolver,omitempty"`
-
- // Whether to calculate class weights automatically based on the
- // popularity of each label.
- AutoClassWeights *bool `json:"autoClassWeights,omitempty"`
-
- // Activation function of the neural nets.
- ActivationFn *string `json:"activationFn,omitempty"`
-
- // Optimizer used for training the neural nets.
- Optimizer *string `json:"optimizer,omitempty"`
-
- // Budget in hours for AutoML training.
- BudgetHours *float64 `json:"budgetHours,omitempty"`
-
- // Whether to standardize numerical features. Defaults to true.
- StandardizeFeatures *bool `json:"standardizeFeatures,omitempty"`
-
- // L1 regularization coefficient to activations.
- L1RegActivation *float64 `json:"l1RegActivation,omitempty"`
-
- // The model registry.
- ModelRegistry *string `json:"modelRegistry,omitempty"`
-
- // The version aliases to apply in Vertex AI model registry. Always
- // overwrite if the version aliases exist in an existing model.
- VertexAiModelVersionAliases []string `json:"vertexAiModelVersionAliases,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.ModelReference -type ModelReference struct { - // Required. The ID of the project containing this model. - ProjectID *string `json:"projectID,omitempty"` - - // Required. The ID of the dataset containing this model. - DatasetID *string `json:"datasetID,omitempty"` - - // Required. The ID of the model. The ID must contain only - // letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum - // length is 1,024 characters. - ModelID *string `json:"modelID,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.ParquetOptions -type ParquetOptions struct { - // Optional. Indicates whether to infer Parquet ENUM logical type as STRING - // instead of BYTES by default. - EnumAsString *bool `json:"enumAsString,omitempty"` - - // Optional. Indicates whether to use schema inference specifically for - // Parquet LIST logical type. - EnableListInference *bool `json:"enableListInference,omitempty"` - - // Optional. Indicates how to represent a Parquet map if present. - MapTargetType *string `json:"mapTargetType,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.PartitionSkew -type PartitionSkew struct { - // Output only. Source stages which produce skewed data. - SkewSources []PartitionSkew_SkewSource `json:"skewSources,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.PartitionSkew.SkewSource -type PartitionSkew_SkewSource struct { - // Output only. Stage id of the skew source stage. - StageID *int64 `json:"stageID,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.PartitionedColumn -type PartitionedColumn struct { - // Required. The name of the partition column. - Field *string `json:"field,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.PartitioningDefinition -type PartitioningDefinition struct { - // Optional. Details about each partitioning column. This field is output only - // for all partitioning types other than metastore partitioned tables. - // BigQuery native tables only support 1 partitioning column. Other table - // types may support 0, 1 or more partitioning columns. - // For metastore partitioned tables, the order must match the definition order - // in the Hive Metastore, where it must match the physical layout of the - // table. For example, - // - // CREATE TABLE a_table(id BIGINT, name STRING) - // PARTITIONED BY (city STRING, state STRING). - // - // In this case the values must be ['city', 'state'] in that order. - PartitionedColumn []PartitionedColumn `json:"partitionedColumn,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.PerformanceInsights -type PerformanceInsights struct { - // Output only. Average execution ms of previous runs. Indicates the job ran - // slow compared to previous executions. To find previous executions, use - // INFORMATION_SCHEMA tables and filter jobs with same query hash. - AvgPreviousExecutionMs *int64 `json:"avgPreviousExecutionMs,omitempty"` - - // Output only. Standalone query stage performance insights, for exploring - // potential improvements. - StagePerformanceStandaloneInsights []StagePerformanceStandaloneInsight `json:"stagePerformanceStandaloneInsights,omitempty"` - - // Output only. Query stage performance insights compared to previous runs, - // for diagnosing performance regression. 
- StagePerformanceChangeInsights []StagePerformanceChangeInsight `json:"stagePerformanceChangeInsights,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.PrimaryKey -type PrimaryKey struct { - // Required. The columns that are composed of the primary key constraint. - Columns []string `json:"columns,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.PrivacyPolicy -type PrivacyPolicy struct { - // Optional. Policy used for aggregation thresholds. - AggregationThresholdPolicy *AggregationThresholdPolicy `json:"aggregationThresholdPolicy,omitempty"` - - // Optional. Policy used for differential privacy. - DifferentialPrivacyPolicy *DifferentialPrivacyPolicy `json:"differentialPrivacyPolicy,omitempty"` - - // Optional. Join restriction policy is outside of the one of policies, since - // this policy can be set along with other policies. This policy gives data - // providers the ability to enforce joins on the 'join_allowed_columns' when - // data is queried from a privacy protected view. - JoinRestrictionPolicy *JoinRestrictionPolicy `json:"joinRestrictionPolicy,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.QueryInfo -type QueryInfo struct { - // Output only. Information about query optimizations. - OptimizationDetails *google_protobuf_Struct `json:"optimizationDetails,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.QueryParameter -type QueryParameter struct { - // Optional. If unset, this is a positional parameter. Otherwise, should be - // unique within a query. - Name *string `json:"name,omitempty"` - - // Required. The type of this parameter. - ParameterType *QueryParameterType `json:"parameterType,omitempty"` - - // Required. The value of this parameter. - ParameterValue *QueryParameterValue `json:"parameterValue,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.QueryParameterStructType -type QueryParameterStructType struct { - // Optional. The name of this field. - Name *string `json:"name,omitempty"` - - // Required. The type of this field. - Type *QueryParameterType `json:"type,omitempty"` - - // Optional. Human-oriented description of the field. - Description *string `json:"description,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.QueryParameterType -type QueryParameterType struct { - // Required. The top level type of this field. - Type *string `json:"type,omitempty"` - - // Optional. The type of the array's elements, if this is an array. - ArrayType *QueryParameterType `json:"arrayType,omitempty"` - - // Optional. The types of the fields of this struct, in order, if this is a - // struct. - StructTypes []QueryParameterStructType `json:"structTypes,omitempty"` - - // Optional. The element type of the range, if this is a range. - RangeElementType *QueryParameterType `json:"rangeElementType,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.QueryParameterValue -type QueryParameterValue struct { - // Optional. The value of this value, if a simple scalar type. - Value *string `json:"value,omitempty"` - - // Optional. The array values, if this is an array type. - ArrayValues []QueryParameterValue `json:"arrayValues,omitempty"` - - // TODO: map type string message for struct_values - - // Optional. The range value, if this is a range type. - RangeValue *RangeValue `json:"rangeValue,omitempty"` - - // This field should not be used. 
- AltStructValues []google_protobuf_Value `json:"altStructValues,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.QueryTimelineSample -type QueryTimelineSample struct { - // Milliseconds elapsed since the start of query execution. - ElapsedMs *int64 `json:"elapsedMs,omitempty"` - - // Cumulative slot-ms consumed by the query. - TotalSlotMs *int64 `json:"totalSlotMs,omitempty"` - - // Total units of work remaining for the query. This number can be revised - // (increased or decreased) while the query is running. - PendingUnits *int64 `json:"pendingUnits,omitempty"` - - // Total parallel units of work completed by this query. - CompletedUnits *int64 `json:"completedUnits,omitempty"` - - // Total number of active workers. This does not correspond directly to - // slot usage. This is the largest value observed since the last sample. - ActiveUnits *int64 `json:"activeUnits,omitempty"` - - // Units of work that can be scheduled immediately. Providing additional slots - // for these units of work will accelerate the query, if no other query in - // the reservation needs additional slots. - EstimatedRunnableUnits *int64 `json:"estimatedRunnableUnits,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.RangePartitioning -type RangePartitioning struct { - // Required. The name of the column to partition the table on. It must be a - // top-level, INT64 column whose mode is NULLABLE or REQUIRED. - Field *string `json:"field,omitempty"` - - // Defines the ranges for range partitioning. - Range *RangePartitioning_Range `json:"range,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.RangePartitioning.Range -type RangePartitioning_Range struct { - // Required. The start of range partitioning, inclusive. This field is an - // INT64 value represented as a string. - Start *string `json:"start,omitempty"` - - // Required. The end of range partitioning, exclusive. This field is an - // INT64 value represented as a string. - End *string `json:"end,omitempty"` - - // Required. The width of each interval. This field is an INT64 value - // represented as a string. - Interval *string `json:"interval,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.RangeValue -type RangeValue struct { - // Optional. The start value of the range. A missing value represents an - // unbounded start. - Start *QueryParameterValue `json:"start,omitempty"` - - // Optional. The end value of the range. A missing value represents an - // unbounded end. - End *QueryParameterValue `json:"end,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.RemoteModelInfo -type RemoteModelInfo struct { - // Output only. The endpoint for remote model. - Endpoint *string `json:"endpoint,omitempty"` - - // Output only. The remote service type for remote model. - RemoteServiceType *string `json:"remoteServiceType,omitempty"` - - // Output only. Fully qualified name of the user-provided connection object of - // the remote model. Format: - // ```"projects/{project_id}/locations/{location_id}/connections/{connection_id}"``` - Connection *string `json:"connection,omitempty"` - - // Output only. Max number of rows in each batch sent to the remote service. - // If unset, the number of rows in each batch is set dynamically. - MaxBatchingRows *int64 `json:"maxBatchingRows,omitempty"` - - // Output only. The model version for LLM. - RemoteModelVersion *string `json:"remoteModelVersion,omitempty"` - - // Output only. The name of the speech recognizer to use for speech - // recognition. 
The expected format is - // `projects/{project}/locations/{location}/recognizers/{recognizer}`. - // Customers can specify this field at model creation. If not specified, a - // default recognizer `projects/{model - // project}/locations/global/recognizers/_` will be used. See more details at - // [recognizers](https://cloud.google.com/speech-to-text/v2/docs/reference/rest/v2/projects.locations.recognizers) - SpeechRecognizer *string `json:"speechRecognizer,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.RestrictionConfig -type RestrictionConfig struct { - // Output only. Specifies the type of dataset/table restriction. - Type *string `json:"type,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.Routine -type Routine struct { - // Output only. A hash of this resource. - Etag *string `json:"etag,omitempty"` - - // Required. Reference describing the ID of this routine. - RoutineReference *RoutineReference `json:"routineReference,omitempty"` - - // Required. The type of routine. - RoutineType *string `json:"routineType,omitempty"` - - // Output only. The time when this routine was created, in milliseconds since - // the epoch. - CreationTime *int64 `json:"creationTime,omitempty"` - - // Output only. The time when this routine was last modified, in milliseconds - // since the epoch. - LastModifiedTime *int64 `json:"lastModifiedTime,omitempty"` - - // Optional. Defaults to "SQL" if remote_function_options field is absent, not - // set otherwise. - Language *string `json:"language,omitempty"` - - // Optional. - Arguments []Routine_Argument `json:"arguments,omitempty"` - - // Optional if language = "SQL"; required otherwise. - // Cannot be set if routine_type = "TABLE_VALUED_FUNCTION". - // - // If absent, the return type is inferred from definition_body at query time - // in each query that references this routine. If present, then the evaluated - // result will be cast to the specified returned type at query time. - // - // For example, for the functions created with the following statements: - // - // * `CREATE FUNCTION Add(x FLOAT64, y FLOAT64) RETURNS FLOAT64 AS (x + y);` - // - // * `CREATE FUNCTION Increment(x FLOAT64) AS (Add(x, 1));` - // - // * `CREATE FUNCTION Decrement(x FLOAT64) RETURNS FLOAT64 AS (Add(x, -1));` - // - // The return_type is `{type_kind: "FLOAT64"}` for `Add` and `Decrement`, and - // is absent for `Increment` (inferred as FLOAT64 at query time). - // - // Suppose the function `Add` is replaced by - // `CREATE OR REPLACE FUNCTION Add(x INT64, y INT64) AS (x + y);` - // - // Then the inferred return type of `Increment` is automatically changed to - // INT64 at query time, while the return type of `Decrement` remains FLOAT64. - ReturnType *StandardSqlDataType `json:"returnType,omitempty"` - - // Optional. Can be set only if routine_type = "TABLE_VALUED_FUNCTION". - // - // If absent, the return table type is inferred from definition_body at query - // time in each query that references this routine. If present, then the - // columns in the evaluated table result will be cast to match the column - // types specified in return table type, at query time. - ReturnTableType *StandardSqlTableType `json:"returnTableType,omitempty"` - - // Optional. If language = "JAVASCRIPT", this field stores the path of the - // imported JAVASCRIPT libraries. - ImportedLibraries []string `json:"importedLibraries,omitempty"` - - // Required. The body of the routine. - // - // For functions, this is the expression in the AS clause. 
- // - // If language=SQL, it is the substring inside (but excluding) the - // parentheses. For example, for the function created with the following - // statement: - // - // `CREATE FUNCTION JoinLines(x string, y string) as (concat(x, "\n", y))` - // - // The definition_body is `concat(x, "\n", y)` (\n is not replaced with - // linebreak). - // - // If language=JAVASCRIPT, it is the evaluated string in the AS clause. - // For example, for the function created with the following statement: - // - // `CREATE FUNCTION f() RETURNS STRING LANGUAGE js AS 'return "\n";\n'` - // - // The definition_body is - // - // `return "\n";\n` - // - // Note that both \n are replaced with linebreaks. - DefinitionBody *string `json:"definitionBody,omitempty"` - - // Optional. The description of the routine, if defined. - Description *string `json:"description,omitempty"` - - // Optional. The determinism level of the JavaScript UDF, if defined. - DeterminismLevel *string `json:"determinismLevel,omitempty"` - - // Optional. The security mode of the routine, if defined. If not defined, the - // security mode is automatically determined from the routine's configuration. - SecurityMode *string `json:"securityMode,omitempty"` - - // Optional. Use this option to catch many common errors. Error checking is - // not exhaustive, and successfully creating a procedure doesn't guarantee - // that the procedure will successfully execute at runtime. If `strictMode` is - // set to `TRUE`, the procedure body is further checked for errors such as - // non-existent tables or columns. The `CREATE PROCEDURE` statement fails if - // the body fails any of these checks. - // - // If `strictMode` is set to `FALSE`, the procedure body is checked only for - // syntax. For procedures that invoke themselves recursively, specify - // `strictMode=FALSE` to avoid non-existent procedure errors during - // validation. - // - // Default value is `TRUE`. - StrictMode *bool `json:"strictMode,omitempty"` - - // Optional. Remote function specific options. - RemoteFunctionOptions *Routine_RemoteFunctionOptions `json:"remoteFunctionOptions,omitempty"` - - // Optional. Spark specific options. - SparkOptions *SparkOptions `json:"sparkOptions,omitempty"` - - // Optional. If set to `DATA_MASKING`, the function is validated and made - // available as a masking function. For more information, see [Create custom - // masking - // routines](https://cloud.google.com/bigquery/docs/user-defined-functions#custom-mask). - DataGovernanceType *string `json:"dataGovernanceType,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.Routine.Argument -type Routine_Argument struct { - // Optional. The name of this argument. Can be absent for function return - // argument. - Name *string `json:"name,omitempty"` - - // Optional. Defaults to FIXED_TYPE. - ArgumentKind *string `json:"argumentKind,omitempty"` - - // Optional. Specifies whether the argument is input or output. - // Can be set for procedures only. - Mode *string `json:"mode,omitempty"` - - // Required unless argument_kind = ANY_TYPE. - DataType *StandardSqlDataType `json:"dataType,omitempty"` - - // Optional. Whether the argument is an aggregate function parameter. - // Must be Unset for routine types other than AGGREGATE_FUNCTION. - // For AGGREGATE_FUNCTION, if set to false, it is equivalent to adding "NOT - // AGGREGATE" clause in DDL; Otherwise, it is equivalent to omitting "NOT - // AGGREGATE" clause in DDL. 
- IsAggregate *bool `json:"isAggregate,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.Routine.RemoteFunctionOptions -type Routine_RemoteFunctionOptions struct { - // Endpoint of the user-provided remote service, e.g. - // ```https://us-east1-my_gcf_project.cloudfunctions.net/remote_add``` - Endpoint *string `json:"endpoint,omitempty"` - - // Fully qualified name of the user-provided connection object which holds - // the authentication information to send requests to the remote service. - // Format: - // ```"projects/{projectId}/locations/{locationId}/connections/{connectionId}"``` - Connection *string `json:"connection,omitempty"` - - // User-defined context as a set of key/value pairs, which will be sent as - // function invocation context together with batched arguments in the - // requests to the remote service. The total number of bytes of keys and - // values must be less than 8KB. - UserDefinedContext map[string]string `json:"userDefinedContext,omitempty"` - - // Max number of rows in each batch sent to the remote service. - // If absent or if 0, BigQuery dynamically decides the number of rows in a - // batch. - MaxBatchingRows *int64 `json:"maxBatchingRows,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.RoutineReference -type RoutineReference struct { - // Required. The ID of the project containing this routine. - ProjectId *string `json:"projectId"` - - // Required. The ID of the dataset containing this routine. - DatasetId *string `json:"datasetId"` - - // Required. The ID of the routine. The ID must contain only - // letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum - // length is 256 characters. - RoutineId *string `json:"routineId"` -} - -// +kcc:proto=google.cloud.bigquery.v2.RowAccessPolicy -type RowAccessPolicy struct { - // Output only. A hash of this resource. - Etag *string `json:"etag,omitempty"` - - // Required. Reference describing the ID of this row access policy. - RowAccessPolicyReference *RowAccessPolicyReference `json:"rowAccessPolicyReference,omitempty"` - - // Required. A SQL boolean expression that represents the rows defined by this - // row access policy, similar to the boolean expression in a WHERE clause of a - // SELECT query on a table. - // References to other tables, routines, and temporary functions are not - // supported. - // - // Examples: region="EU" - // date_field = CAST('2019-9-27' as DATE) - // nullable_field is not NULL - // numeric_field BETWEEN 1.0 AND 5.0 - FilterPredicate *string `json:"filterPredicate,omitempty"` - - // Output only. The time when this row access policy was created, in - // milliseconds since the epoch. - CreationTime *string `json:"creationTime,omitempty"` - - // Output only. The time when this row access policy was last modified, in - // milliseconds since the epoch. - LastModifiedTime *string `json:"lastModifiedTime,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.RowAccessPolicyReference -type RowAccessPolicyReference struct { - // Required. The ID of the project containing this row access policy. - ProjectID *string `json:"projectID,omitempty"` - - // Required. The ID of the dataset containing this row access policy. - DatasetID *string `json:"datasetID,omitempty"` - - // Required. The ID of the table containing this row access policy. - TableID *string `json:"tableID,omitempty"` - - // Required. The ID of the row access policy. The ID must contain only - // letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum - // length is 256 characters. 
- PolicyID *string `json:"policyID,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.RowLevelSecurityStatistics -type RowLevelSecurityStatistics struct { - // Whether any accessed data was protected by row access policies. - RowLevelSecurityApplied *bool `json:"rowLevelSecurityApplied,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.ScriptOptions -type ScriptOptions struct { - // Timeout period for each statement in a script. - StatementTimeoutMs *int64 `json:"statementTimeoutMs,omitempty"` - - // Limit on the number of bytes billed per statement. Exceeding this budget - // results in an error. - StatementByteBudget *int64 `json:"statementByteBudget,omitempty"` - - // Determines which statement in the script represents the "key result", - // used to populate the schema and query results of the script job. - // Default is LAST. - KeyResultStatement *string `json:"keyResultStatement,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.ScriptStatistics -type ScriptStatistics struct { - // Whether this child job was a statement or expression. - EvaluationKind *string `json:"evaluationKind,omitempty"` - - // Stack trace showing the line/column/procedure name of each frame on the - // stack at the point where the current evaluation happened. The leaf frame - // is first, the primary script is last. Never empty. - StackFrames []ScriptStatistics_ScriptStackFrame `json:"stackFrames,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.ScriptStatistics.ScriptStackFrame -type ScriptStatistics_ScriptStackFrame struct { - // Output only. One-based start line. - StartLine *int32 `json:"startLine,omitempty"` - - // Output only. One-based start column. - StartColumn *int32 `json:"startColumn,omitempty"` - - // Output only. One-based end line. - EndLine *int32 `json:"endLine,omitempty"` - - // Output only. One-based end column. - EndColumn *int32 `json:"endColumn,omitempty"` - - // Output only. Name of the active procedure, empty if in a top-level - // script. - ProcedureID *string `json:"procedureID,omitempty"` - - // Output only. Text of the current statement/expression. - Text *string `json:"text,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.SearchStatistics -type SearchStatistics struct { - // Specifies the index usage mode for the query. - IndexUsageMode *string `json:"indexUsageMode,omitempty"` - - // When `indexUsageMode` is `UNUSED` or `PARTIALLY_USED`, this field explains - // why indexes were not used in all or part of the search query. If - // `indexUsageMode` is `FULLY_USED`, this field is not populated. - IndexUnusedReasons []IndexUnusedReason `json:"indexUnusedReasons,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.SerDeInfo -type SerDeInfo struct { - // Optional. Name of the SerDe. - // The maximum length is 256 characters. - Name *string `json:"name,omitempty"` - - // Required. Specifies a fully-qualified class name of the serialization - // library that is responsible for the translation of data between table - // representation and the underlying low-level input and output format - // structures. The maximum length is 256 characters. - SerializationLibrary *string `json:"serializationLibrary,omitempty"` - - // Optional. Key-value pairs that define the initialization parameters for the - // serialization library. - // Maximum size 10 Kib. - Parameters map[string]string `json:"parameters,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.SessionInfo -type SessionInfo struct { - // Output only. The id of the session. 
- SessionID *string `json:"sessionID,omitempty"`
-}
-
-// +kcc:proto=google.cloud.bigquery.v2.SnapshotDefinition
-type SnapshotDefinition struct {
- // Required. Reference describing the ID of the table that was snapshot.
- BaseTableReference *TableReference `json:"baseTableReference,omitempty"`
-
- // Required. The time at which the base table was snapshot. This value is
- // reported in the JSON response using RFC3339 format.
- SnapshotTime *string `json:"snapshotTime,omitempty"`
-}
-
-// +kcc:proto=google.cloud.bigquery.v2.SparkOptions
-type SparkOptions struct {
- // Fully qualified name of the user-provided Spark connection object. Format:
- // ```"projects/{project_id}/locations/{location_id}/connections/{connection_id}"```
- Connection *string `json:"connection,omitempty"`
-
- // Runtime version. If not specified, the default runtime version is used.
- RuntimeVersion *string `json:"runtimeVersion,omitempty"`
-
- // Custom container image for the runtime environment.
- ContainerImage *string `json:"containerImage,omitempty"`
-
- // Configuration properties as a set of key/value pairs, which will be passed
- // on to the Spark application. For more information, see
- // [Apache Spark](https://spark.apache.org/docs/latest/index.html) and the
- // [procedure option
- // list](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#procedure_option_list).
- Properties map[string]string `json:"properties,omitempty"`
-
- // The main file/jar URI of the Spark application. Exactly one of the
- // definition_body field and the main_file_uri field must be set for Python.
- // Exactly one of main_class and main_file_uri field
- // should be set for Java/Scala language type.
- MainFileUri *string `json:"mainFileUri,omitempty"`
-
- // Python files to be placed on the PYTHONPATH for PySpark application.
- // Supported file types: `.py`, `.egg`, and `.zip`. For more information
- // about Apache Spark, see
- // [Apache Spark](https://spark.apache.org/docs/latest/index.html).
- PyFileUris []string `json:"pyFileUris,omitempty"`
-
- // JARs to include on the driver and executor CLASSPATH.
- // For more information about Apache Spark, see
- // [Apache Spark](https://spark.apache.org/docs/latest/index.html).
- JarUris []string `json:"jarUris,omitempty"`
-
- // Files to be placed in the working directory of each executor.
- // For more information about Apache Spark, see
- // [Apache Spark](https://spark.apache.org/docs/latest/index.html).
- FileUris []string `json:"fileUris,omitempty"`
-
- // Archive files to be extracted into the working directory of each executor.
- // For more information about Apache Spark, see
- // [Apache Spark](https://spark.apache.org/docs/latest/index.html).
- ArchiveUris []string `json:"archiveUris,omitempty"`
-
- // The fully qualified name of a class in jar_uris, for example,
- // com.example.wordcount. Exactly one of main_class and main_jar_uri field
- // should be set for Java/Scala language type.
- MainClass *string `json:"mainClass,omitempty"`
-}
-
-// +kcc:proto=google.cloud.bigquery.v2.SparkStatistics
-type SparkStatistics struct {
- // Output only. Spark job ID if a Spark job is created successfully.
- SparkJobID *string `json:"sparkJobID,omitempty"`
-
- // Output only. Location where the Spark job is executed.
- // A location is selected by BigQuery for jobs configured to run in a
- // multi-region.
- SparkJobLocation *string `json:"sparkJobLocation,omitempty"`
-
- // Output only. Endpoints returned from Dataproc.
- // Key list: - // - history_server_endpoint: A link to Spark job UI. - Endpoints map[string]string `json:"endpoints,omitempty"` - - // Output only. Logging info is used to generate a link to Cloud Logging. - LoggingInfo *SparkStatistics_LoggingInfo `json:"loggingInfo,omitempty"` - - // Output only. The Cloud KMS encryption key that is used to protect the - // resources created by the Spark job. If the Spark procedure uses the invoker - // security mode, the Cloud KMS encryption key is either inferred from the - // provided system variable, - // `@@spark_proc_properties.kms_key_name`, or the default key of the BigQuery - // job's project (if the CMEK organization policy is enforced). Otherwise, the - // Cloud KMS key is either inferred from the Spark connection associated with - // the procedure (if it is provided), or from the default key of the Spark - // connection's project if the CMEK organization policy is enforced. - // - // Example: - // - // * `projects/[kms_project_id]/locations/[region]/keyRings/[key_region]/cryptoKeys/[key]` - KmsKeyName *string `json:"kmsKeyName,omitempty"` - - // Output only. The Google Cloud Storage bucket that is used as the default - // file system by the Spark application. This field is only filled when the - // Spark procedure uses the invoker security mode. The `gcsStagingBucket` - // bucket is inferred from the `@@spark_proc_properties.staging_bucket` system - // variable (if it is provided). Otherwise, BigQuery creates a default staging - // bucket for the job and returns the bucket name in this field. - // - // Example: - // - // * `gs://[bucket_name]` - GcsStagingBucket *string `json:"gcsStagingBucket,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.SparkStatistics.LoggingInfo -type SparkStatistics_LoggingInfo struct { - // Output only. Resource type used for logging. - ResourceType *string `json:"resourceType,omitempty"` - - // Output only. Project ID where the Spark logs were written. - ProjectID *string `json:"projectID,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.StagePerformanceChangeInsight -type StagePerformanceChangeInsight struct { - // Output only. The stage id that the insight mapped to. - StageID *int64 `json:"stageID,omitempty"` - - // Output only. Input data change insight of the query stage. - InputDataChange *InputDataChange `json:"inputDataChange,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.StagePerformanceStandaloneInsight -type StagePerformanceStandaloneInsight struct { - // Output only. The stage id that the insight mapped to. - StageID *int64 `json:"stageID,omitempty"` - - // Output only. True if the stage has a slot contention issue. - SlotContention *bool `json:"slotContention,omitempty"` - - // Output only. True if the stage has insufficient shuffle quota. - InsufficientShuffleQuota *bool `json:"insufficientShuffleQuota,omitempty"` - - // Output only. If present, the stage had the following reasons for being - // disqualified from BI Engine execution. - BiEngineReasons []BiEngineReason `json:"biEngineReasons,omitempty"` - - // Output only. High cardinality joins in the stage. - HighCardinalityJoins []HighCardinalityJoin `json:"highCardinalityJoins,omitempty"` - - // Output only. Partition skew in the stage. - PartitionSkew *PartitionSkew `json:"partitionSkew,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.StandardSqlDataType -type StandardSqlDataType struct { - // Required. The top level type of this field. - // Can be any GoogleSQL data type (e.g., "INT64", "DATE", "ARRAY"). 
- TypeKind *string `json:"typeKind,omitempty"` - - // The type of the array's elements, if type_kind = "ARRAY". - ArrayElementType *StandardSqlDataType `json:"arrayElementType,omitempty"` - - // The fields of this struct, in order, if type_kind = "STRUCT". - StructType *StandardSqlStructType `json:"structType,omitempty"` - - // The type of the range's elements, if type_kind = "RANGE". - RangeElementType *StandardSqlDataType `json:"rangeElementType,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.StandardSqlField -type StandardSqlField struct { - // Optional. The name of this field. Can be absent for struct fields. - Name *string `json:"name,omitempty"` - - // Optional. The type of this parameter. Absent if not explicitly - // specified (e.g., CREATE FUNCTION statement can omit the return type; - // in this case the output parameter does not have this "type" field). - Type *StandardSqlDataType `json:"type,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.StandardSqlStructType -type StandardSqlStructType struct { - // Fields within the struct. - Fields []StandardSqlField `json:"fields,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.StandardSqlTableType -type StandardSqlTableType struct { - // The columns in this table type - Columns []StandardSqlField `json:"columns,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.StorageDescriptor -type StorageDescriptor struct { - // Optional. The physical location of the table - // (e.g. 'gs://spark-dataproc-data/pangea-data/case_sensitive/' or - // 'gs://spark-dataproc-data/pangea-data/*'). - // The maximum length is 2056 bytes. - LocationUri *string `json:"locationUri,omitempty"` - - // Optional. Specifies the fully qualified class name of the InputFormat - // (e.g. "org.apache.hadoop.hive.ql.io.orc.OrcInputFormat"). - // The maximum length is 128 characters. - InputFormat *string `json:"inputFormat,omitempty"` - - // Optional. Specifies the fully qualified class name of the OutputFormat - // (e.g. "org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat"). - // The maximum length is 128 characters. - OutputFormat *string `json:"outputFormat,omitempty"` - - // Optional. Serializer and deserializer information. - SerdeInfo *SerDeInfo `json:"serdeInfo,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.Streamingbuffer -type Streamingbuffer struct { - // Output only. A lower-bound estimate of the number of bytes currently in - // the streaming buffer. - EstimatedBytes *uint64 `json:"estimatedBytes,omitempty"` - - // Output only. A lower-bound estimate of the number of rows currently in the - // streaming buffer. - EstimatedRows *uint64 `json:"estimatedRows,omitempty"` - - // Output only. Contains the timestamp of the oldest entry in the streaming - // buffer, in milliseconds since the epoch, if the streaming buffer is - // available. - OldestEntryTime *uint64 `json:"oldestEntryTime,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.SystemVariables -type SystemVariables struct { - - // TODO: map type string message for types - - // Output only. Value for each system variable. - Values *google_protobuf_Struct `json:"values,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.Table -type Table struct { - // The type of resource ID. - Kind *string `json:"kind,omitempty"` - - // Output only. A hash of this resource. - Etag *string `json:"etag,omitempty"` - - // Output only. An opaque ID uniquely identifying the table. - ID *string `json:"id,omitempty"` - - // Output only. 
A URL that can be used to access this resource again. - SelfLink *string `json:"selfLink,omitempty"` - - // Required. Reference describing the ID of this table. - TableReference *TableReference `json:"tableReference,omitempty"` - - // Optional. A descriptive name for this table. - FriendlyName *string `json:"friendlyName,omitempty"` - - // Optional. A user-friendly description of this table. - Description *string `json:"description,omitempty"` - - // The labels associated with this table. You can use these to organize and - // group your tables. Label keys and values can be no longer than 63 - // characters, can only contain lowercase letters, numeric characters, - // underscores and dashes. International characters are allowed. Label values - // are optional. Label keys must start with a letter and each label in the - // list must have a different key. - Labels map[string]string `json:"labels,omitempty"` - - // Optional. Describes the schema of this table. - Schema *TableSchema `json:"schema,omitempty"` - - // If specified, configures time-based partitioning for this table. - TimePartitioning *TimePartitioning `json:"timePartitioning,omitempty"` - - // If specified, configures range partitioning for this table. - RangePartitioning *RangePartitioning `json:"rangePartitioning,omitempty"` - - // Clustering specification for the table. Must be specified with time-based - // partitioning, data in the table will be first partitioned and subsequently - // clustered. - Clustering *Clustering `json:"clustering,omitempty"` - - // Optional. If set to true, queries over this table require - // a partition filter that can be used for partition elimination to be - // specified. - RequirePartitionFilter *bool `json:"requirePartitionFilter,omitempty"` - - // Optional. The partition information for all table formats, including - // managed partitioned tables, hive partitioned tables, iceberg partitioned, - // and metastore partitioned tables. This field is only populated for - // metastore partitioned tables. For other table formats, this is an output - // only field. - PartitionDefinition *PartitioningDefinition `json:"partitionDefinition,omitempty"` - - // Output only. The size of this table in logical bytes, excluding any data in - // the streaming buffer. - NumBytes *int64 `json:"numBytes,omitempty"` - - // Output only. The physical size of this table in bytes. This includes - // storage used for time travel. - NumPhysicalBytes *int64 `json:"numPhysicalBytes,omitempty"` - - // Output only. The number of logical bytes in the table that are considered - // "long-term storage". - NumLongTermBytes *int64 `json:"numLongTermBytes,omitempty"` - - // Output only. The number of rows of data in this table, excluding any data - // in the streaming buffer. - NumRows *uint64 `json:"numRows,omitempty"` - - // Output only. The time when this table was created, in milliseconds since - // the epoch. - CreationTime *int64 `json:"creationTime,omitempty"` - - // Optional. The time when this table expires, in milliseconds since the - // epoch. If not present, the table will persist indefinitely. Expired tables - // will be deleted and their storage reclaimed. The defaultTableExpirationMs - // property of the encapsulating dataset can be used to set a default - // expirationTime on newly created tables. - ExpirationTime *int64 `json:"expirationTime,omitempty"` - - // Output only. The time when this table was last modified, in milliseconds - // since the epoch. 
- LastModifiedTime *uint64 `json:"lastModifiedTime,omitempty"` - - // Output only. Describes the table type. The following values are supported: - // - // * `TABLE`: A normal BigQuery table. - // * `VIEW`: A virtual table defined by a SQL query. - // * `EXTERNAL`: A table that references data stored in an external storage - // system, such as Google Cloud Storage. - // * `MATERIALIZED_VIEW`: A precomputed view defined by a SQL query. - // * `SNAPSHOT`: An immutable BigQuery table that preserves the contents of a - // base table at a particular time. See additional information on - // [table - // snapshots](https://cloud.google.com/bigquery/docs/table-snapshots-intro). - // - // The default value is `TABLE`. - Type *string `json:"type,omitempty"` - - // Optional. The view definition. - View *ViewDefinition `json:"view,omitempty"` - - // Optional. The materialized view definition. - MaterializedView *MaterializedViewDefinition `json:"materializedView,omitempty"` - - // Output only. The materialized view status. - MaterializedViewStatus *MaterializedViewStatus `json:"materializedViewStatus,omitempty"` - - // Optional. Describes the data format, location, and other properties of - // a table stored outside of BigQuery. By defining these properties, the data - // source can then be queried as if it were a standard BigQuery table. - ExternalDataConfiguration *ExternalDataConfiguration `json:"externalDataConfiguration,omitempty"` - - // Optional. Specifies the configuration of a BigLake managed table. - BiglakeConfiguration *BigLakeConfiguration `json:"biglakeConfiguration,omitempty"` - - // Output only. The geographic location where the table resides. This value - // is inherited from the dataset. - Location *string `json:"location,omitempty"` - - // Output only. Contains information regarding this table's streaming buffer, - // if one is present. This field will be absent if the table is not being - // streamed to or if there is no data in the streaming buffer. - StreamingBuffer *Streamingbuffer `json:"streamingBuffer,omitempty"` - - // Custom encryption configuration (e.g., Cloud KMS keys). - EncryptionConfiguration *EncryptionConfiguration `json:"encryptionConfiguration,omitempty"` - - // Output only. Contains information about the snapshot. This value is set via - // snapshot creation. - SnapshotDefinition *SnapshotDefinition `json:"snapshotDefinition,omitempty"` - - // Optional. Defines the default collation specification of new STRING fields - // in the table. During table creation or update, if a STRING field is added - // to this table without explicit collation specified, then the table inherits - // the table default collation. A change to this field affects only fields - // added afterwards, and does not alter the existing fields. - // The following values are supported: - // - // * 'und:ci': undetermined locale, case insensitive. - // * '': empty string. Default to case-sensitive behavior. - DefaultCollation *string `json:"defaultCollation,omitempty"` - - // Optional. Defines the default rounding mode specification of new decimal - // fields (NUMERIC OR BIGNUMERIC) in the table. During table creation or - // update, if a decimal field is added to this table without an explicit - // rounding mode specified, then the field inherits the table default - // rounding mode. Changing this field doesn't affect existing fields. - DefaultRoundingMode *string `json:"defaultRoundingMode,omitempty"` - - // Output only. Contains information about the clone. 
This value is set via - // the clone operation. - CloneDefinition *CloneDefinition `json:"cloneDefinition,omitempty"` - - // Output only. Number of physical bytes used by time travel storage (deleted - // or changed data). This data is not kept in real time, and might be delayed - // by a few seconds to a few minutes. - NumTimeTravelPhysicalBytes *int64 `json:"numTimeTravelPhysicalBytes,omitempty"` - - // Output only. Total number of logical bytes in the table or materialized - // view. - NumTotalLogicalBytes *int64 `json:"numTotalLogicalBytes,omitempty"` - - // Output only. Number of logical bytes that are less than 90 days old. - NumActiveLogicalBytes *int64 `json:"numActiveLogicalBytes,omitempty"` - - // Output only. Number of logical bytes that are more than 90 days old. - NumLongTermLogicalBytes *int64 `json:"numLongTermLogicalBytes,omitempty"` - - // Output only. Number of physical bytes used by current live data storage. - // This data is not kept in real time, and might be delayed by a few seconds - // to a few minutes. - NumCurrentPhysicalBytes *int64 `json:"numCurrentPhysicalBytes,omitempty"` - - // Output only. The physical size of this table in bytes. This also includes - // storage used for time travel. This data is not kept in real time, and might - // be delayed by a few seconds to a few minutes. - NumTotalPhysicalBytes *int64 `json:"numTotalPhysicalBytes,omitempty"` - - // Output only. Number of physical bytes less than 90 days old. This data is - // not kept in real time, and might be delayed by a few seconds to a few - // minutes. - NumActivePhysicalBytes *int64 `json:"numActivePhysicalBytes,omitempty"` - - // Output only. Number of physical bytes more than 90 days old. - // This data is not kept in real time, and might be delayed by a few seconds - // to a few minutes. - NumLongTermPhysicalBytes *int64 `json:"numLongTermPhysicalBytes,omitempty"` - - // Output only. The number of partitions present in the table or materialized - // view. This data is not kept in real time, and might be delayed by a few - // seconds to a few minutes. - NumPartitions *int64 `json:"numPartitions,omitempty"` - - // Optional. The maximum staleness of data that could be returned when the - // table (or stale MV) is queried. Staleness encoded as a string encoding - // of sql IntervalValue type. - MaxStaleness *string `json:"maxStaleness,omitempty"` - - // Optional. Output only. Restriction config for table. If set, restrict - // certain accesses on the table based on the config. See [Data - // egress](https://cloud.google.com/bigquery/docs/analytics-hub-introduction#data_egress) - // for more details. - Restrictions *RestrictionConfig `json:"restrictions,omitempty"` - - // Optional. Tables Primary Key and Foreign Key information - TableConstraints *TableConstraints `json:"tableConstraints,omitempty"` - - // Optional. The [tags](https://cloud.google.com/bigquery/docs/tags) attached - // to this table. Tag keys are globally unique. Tag key is expected to be in - // the namespaced format, for example "123456789012/environment" where - // 123456789012 is the ID of the parent organization or project resource for - // this tag key. Tag value is expected to be the short name, for example - // "Production". See [Tag - // definitions](https://cloud.google.com/iam/docs/tags-access-control#definitions) - // for more details. - ResourceTags map[string]string `json:"resourceTags,omitempty"` - - // Optional. 
Table replication info for table created `AS REPLICA` DDL like: - // `CREATE MATERIALIZED VIEW mv1 AS REPLICA OF src_mv` - TableReplicationInfo *TableReplicationInfo `json:"tableReplicationInfo,omitempty"` - - // Optional. Output only. Table references of all replicas currently active on - // the table. - Replicas []TableReference `json:"replicas,omitempty"` - - // Optional. Options defining open source compatible table. - ExternalCatalogTableOptions *ExternalCatalogTableOptions `json:"externalCatalogTableOptions,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.TableConstraints -type TableConstraints struct { - // Optional. Represents a primary key constraint on a table's columns. - // Present only if the table has a primary key. - // The primary key is not enforced. - PrimaryKey *PrimaryKey `json:"primaryKey,omitempty"` - - // Optional. Present only if the table has a foreign key. - // The foreign key is not enforced. - ForeignKeys []ForeignKey `json:"foreignKeys,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.TableFieldSchema -type TableFieldSchema struct { - // Required. The field name. The name must contain only letters (a-z, A-Z), - // numbers (0-9), or underscores (_), and must start with a letter or - // underscore. The maximum length is 300 characters. - Name *string `json:"name,omitempty"` - - // Required. The field data type. Possible values include: - // - // * STRING - // * BYTES - // * INTEGER (or INT64) - // * FLOAT (or FLOAT64) - // * BOOLEAN (or BOOL) - // * TIMESTAMP - // * DATE - // * TIME - // * DATETIME - // * GEOGRAPHY - // * NUMERIC - // * BIGNUMERIC - // * JSON - // * RECORD (or STRUCT) - // * RANGE - // - // Use of RECORD/STRUCT indicates that the field contains a nested schema. - Type *string `json:"type,omitempty"` - - // Optional. The field mode. Possible values include NULLABLE, REQUIRED and - // REPEATED. The default value is NULLABLE. - Mode *string `json:"mode,omitempty"` - - // Optional. Describes the nested schema fields if the type property is set - // to RECORD. - Fields []TableFieldSchema `json:"fields,omitempty"` - - // Optional. The field description. The maximum length is 1,024 characters. - Description *string `json:"description,omitempty"` - - // Optional. The policy tags attached to this field, used for field-level - // access control. If not set, defaults to empty policy_tags. - PolicyTags *TableFieldSchema_PolicyTagList `json:"policyTags,omitempty"` - - // Optional. Data policy options, will replace the data_policies. - DataPolicies []DataPolicyOption `json:"dataPolicies,omitempty"` - - // Optional. Maximum length of values of this field for STRINGS or BYTES. - // - // If max_length is not specified, no maximum length constraint is imposed - // on this field. - // - // If type = "STRING", then max_length represents the maximum UTF-8 - // length of strings in this field. - // - // If type = "BYTES", then max_length represents the maximum number of - // bytes in this field. - // - // It is invalid to set this field if type ≠ "STRING" and ≠ "BYTES". - MaxLength *int64 `json:"maxLength,omitempty"` - - // Optional. Precision (maximum number of total digits in base 10) and scale - // (maximum number of digits in the fractional part in base 10) constraints - // for values of this field for NUMERIC or BIGNUMERIC. - // - // It is invalid to set precision or scale if type ≠ "NUMERIC" and ≠ - // "BIGNUMERIC". 
- // - // If precision and scale are not specified, no value range constraint is - // imposed on this field insofar as values are permitted by the type. - // - // Values of this NUMERIC or BIGNUMERIC field must be in this range when: - // - // * Precision (P) and scale (S) are specified: - // [-10P-S + 10-S, - // 10P-S - 10-S] - // * Precision (P) is specified but not scale (and thus scale is - // interpreted to be equal to zero): - // [-10P + 1, 10P - 1]. - // - // Acceptable values for precision and scale if both are specified: - // - // * If type = "NUMERIC": - // 1 ≤ precision - scale ≤ 29 and 0 ≤ scale ≤ 9. - // * If type = "BIGNUMERIC": - // 1 ≤ precision - scale ≤ 38 and 0 ≤ scale ≤ 38. - // - // Acceptable values for precision if only precision is specified but not - // scale (and thus scale is interpreted to be equal to zero): - // - // * If type = "NUMERIC": 1 ≤ precision ≤ 29. - // * If type = "BIGNUMERIC": 1 ≤ precision ≤ 38. - // - // If scale is specified but not precision, then it is invalid. - Precision *int64 `json:"precision,omitempty"` - - // Optional. See documentation for precision. - Scale *int64 `json:"scale,omitempty"` - - // Optional. Specifies the rounding mode to be used when storing values of - // NUMERIC and BIGNUMERIC type. - RoundingMode *string `json:"roundingMode,omitempty"` - - // Optional. Field collation can be set only when the type of field is STRING. - // The following values are supported: - // - // * 'und:ci': undetermined locale, case insensitive. - // * '': empty string. Default to case-sensitive behavior. - Collation *string `json:"collation,omitempty"` - - // Optional. A SQL expression to specify the [default value] - // (https://cloud.google.com/bigquery/docs/default-values) for this field. - DefaultValueExpression *string `json:"defaultValueExpression,omitempty"` - - // Optional. The subtype of the RANGE, if the type of this field is RANGE. If - // the type is RANGE, this field is required. Values for the field element - // type can be the following: - // - // * DATE - // * DATETIME - // * TIMESTAMP - RangeElementType *TableFieldSchema_FieldElementType `json:"rangeElementType,omitempty"` - - // Optional. Definition of the foreign data type. - // Only valid for top-level schema fields (not nested fields). - // If the type is FOREIGN, this field is required. - ForeignTypeDefinition *string `json:"foreignTypeDefinition,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.TableFieldSchema.FieldElementType -type TableFieldSchema_FieldElementType struct { - // Required. The type of a field element. For more information, see - // [TableFieldSchema.type][google.cloud.bigquery.v2.TableFieldSchema.type]. - Type *string `json:"type,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.TableFieldSchema.PolicyTagList -type TableFieldSchema_PolicyTagList struct { - // A list of policy tag resource names. For example, - // "projects/1/locations/eu/taxonomies/2/policyTags/3". At most 1 policy tag - // is currently allowed. - Names []string `json:"names,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.TableList -type TableList struct { - // The type of list. - Kind *string `json:"kind,omitempty"` - - // A hash of this page of results. - Etag *string `json:"etag,omitempty"` - - // A token to request the next page of results. - NextPageToken *string `json:"nextPageToken,omitempty"` - - // Tables in the requested dataset. - Tables []ListFormatTable `json:"tables,omitempty"` - - // The total number of tables in the dataset. 
- TotalItems *int32 `json:"totalItems,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.TableMetadataCacheUsage -type TableMetadataCacheUsage struct { - // Metadata caching eligible table referenced in the query. - TableReference *TableReference `json:"tableReference,omitempty"` - - // Reason for not using metadata caching for the table. - UnusedReason *string `json:"unusedReason,omitempty"` - - // Free form human-readable reason metadata caching was unused for - // the job. - Explanation *string `json:"explanation,omitempty"` - - // Duration since last refresh as of this job for managed tables (indicates - // metadata cache staleness as seen by this job). - Staleness *string `json:"staleness,omitempty"` - - // [Table - // type](https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#Table.FIELDS.type). - TableType *string `json:"tableType,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.TableReference -type TableReference struct { - // Required. The ID of the project containing this table. - ProjectId *string `json:"projectId"` - - // Required. The ID of the dataset containing this table. - DatasetId *string `json:"datasetId"` - - // Required. The ID of the table. The ID can contain Unicode characters in + // The Id of the table. The Id can contain Unicode characters in // category L (letter), M (mark), N (number), Pc (connector, including // underscore), Pd (dash), and Zs (space). For more information, see [General // Category](https://wikipedia.org/wiki/Unicode_character_property#General_Category). // The maximum length is 1,024 characters. Certain operations allow suffixing - // of the table ID with a partition decorator, such as + // of the table Id with a partition decorator, such as // `sample_table$20190123`. - TableId *string `json:"tableId"` -} - -// +kcc:proto=google.cloud.bigquery.v2.TableReplicationInfo -type TableReplicationInfo struct { - // Required. Source table reference that is replicated. - SourceTable *TableReference `json:"sourceTable,omitempty"` - - // Optional. Specifies the interval at which the source table is polled for - // updates. - // It's Optional. If not specified, default replication interval would be - // applied. - ReplicationIntervalMs *int64 `json:"replicationIntervalMs,omitempty"` - - // Optional. Output only. If source is a materialized view, this field - // signifies the last refresh time of the source. - ReplicatedSourceLastRefreshTime *int64 `json:"replicatedSourceLastRefreshTime,omitempty"` - - // Optional. Output only. Replication status of configured replication. - ReplicationStatus *string `json:"replicationStatus,omitempty"` - - // Optional. Output only. Replication error that will permanently stopped - // table replication. - ReplicationError *ErrorProto `json:"replicationError,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.TableSchema -type TableSchema struct { - // Describes the fields in a table. - Fields []TableFieldSchema `json:"fields,omitempty"` - - // Optional. Specifies metadata of the foreign data type definition in field - // schema - // ([TableFieldSchema.foreign_type_definition][google.cloud.bigquery.v2.TableFieldSchema.foreign_type_definition]). - ForeignTypeInfo *ForeignTypeInfo `json:"foreignTypeInfo,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.TimePartitioning -type TimePartitioning struct { - // Required. The supported types are DAY, HOUR, MONTH, and YEAR, which will - // generate one partition per day, hour, month, and year, respectively. 
- Type *string `json:"type,omitempty"` - - // Optional. Number of milliseconds for which to keep the storage for a - // partition. - // A wrapper is used here because 0 is an invalid value. - ExpirationMs *int64 `json:"expirationMs,omitempty"` - - // Optional. If not set, the table is partitioned by pseudo - // column '_PARTITIONTIME'; if set, the table is partitioned by this field. - // The field must be a top-level TIMESTAMP or DATE field. Its mode must be - // NULLABLE or REQUIRED. - // A wrapper is used here because an empty string is an invalid value. - Field *string `json:"field,omitempty"` + // +required + TableId *string `json:"tableId,omitempty"` } -// +kcc:proto=google.cloud.bigquery.v2.TransformColumn -type TransformColumn struct { - // Output only. Name of the column. - Name *string `json:"name,omitempty"` - - // Output only. Data type of the column after the transform. - Type *StandardSqlDataType `json:"type,omitempty"` - - // Output only. The SQL expression used in the column transform. - TransformSql *string `json:"transformSql,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.UserDefinedFunctionResource -type UserDefinedFunctionResource struct { - // [Pick one] A code resource to load from a Google Cloud Storage URI - // (gs://bucket/path). - ResourceUri *string `json:"resourceUri,omitempty"` - - // [Pick one] An inline resource that contains code for a user-defined - // function (UDF). Providing a inline code resource is equivalent to providing - // a URI for a file containing the same code. - InlineCode *string `json:"inlineCode,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.VectorSearchStatistics -type VectorSearchStatistics struct { - // Specifies the index usage mode for the query. - IndexUsageMode *string `json:"indexUsageMode,omitempty"` - - // When `indexUsageMode` is `UNUSED` or `PARTIALLY_USED`, this field explains - // why indexes were not used in all or part of the vector search query. If - // `indexUsageMode` is `FULLY_USED`, this field is not populated. - IndexUnusedReasons []IndexUnusedReason `json:"indexUnusedReasons,omitempty"` -} - -// +kcc:proto=google.cloud.bigquery.v2.ViewDefinition -type ViewDefinition struct { - // Required. A query that BigQuery executes when the view is referenced. - Query *string `json:"query,omitempty"` - - // Describes user-defined function resources used in the query. - UserDefinedFunctionResources []UserDefinedFunctionResource `json:"userDefinedFunctionResources,omitempty"` - - // Specifies whether to use BigQuery's legacy SQL for this view. - // The default value is true. If set to false, the view will use - // BigQuery's GoogleSQL: - // https://cloud.google.com/bigquery/sql-reference/ - // - // Queries and views that reference this view must use the same flag value. - // A wrapper is used here because the default value is True. - UseLegacySql *bool `json:"useLegacySql,omitempty"` - - // True if the column names are explicitly specified. For example by using the - // 'CREATE VIEW v(c1, c2) AS ...' syntax. - // Can only be set for GoogleSQL views. - UseExplicitColumnNames *bool `json:"useExplicitColumnNames,omitempty"` - - // Optional. Specifics the privacy policy for the view. - PrivacyPolicy *PrivacyPolicy `json:"privacyPolicy,omitempty"` - - // Optional. Foreign view representations. - ForeignDefinitions []ForeignViewDefinition `json:"foreignDefinitions,omitempty"` +// +kcc:proto=google.protobuf.BoolValue +type BoolValue struct { + // The bool value. 
+ Value *bool `json:"value,omitempty"` } diff --git a/apis/bigquery/v1beta1/zz_generated.deepcopy.go b/apis/bigquery/v1beta1/zz_generated.deepcopy.go index b9cb208e43..bf12f8e403 100644 --- a/apis/bigquery/v1beta1/zz_generated.deepcopy.go +++ b/apis/bigquery/v1beta1/zz_generated.deepcopy.go @@ -84,143 +84,6 @@ func (in *Access) DeepCopy() *Access { return out } -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *AggregationThresholdPolicy) DeepCopyInto(out *AggregationThresholdPolicy) { - *out = *in - if in.Threshold != nil { - in, out := &in.Threshold, &out.Threshold - *out = new(int64) - **out = **in - } - if in.PrivacyUnitColumns != nil { - in, out := &in.PrivacyUnitColumns, &out.PrivacyUnitColumns - *out = make([]string, len(*in)) - copy(*out, *in) - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AggregationThresholdPolicy. -func (in *AggregationThresholdPolicy) DeepCopy() *AggregationThresholdPolicy { - if in == nil { - return nil - } - out := new(AggregationThresholdPolicy) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *AvroOptions) DeepCopyInto(out *AvroOptions) { - *out = *in - if in.UseAvroLogicalTypes != nil { - in, out := &in.UseAvroLogicalTypes, &out.UseAvroLogicalTypes - *out = new(bool) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AvroOptions. -func (in *AvroOptions) DeepCopy() *AvroOptions { - if in == nil { - return nil - } - out := new(AvroOptions) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *BiEngineReason) DeepCopyInto(out *BiEngineReason) { - *out = *in - if in.Code != nil { - in, out := &in.Code, &out.Code - *out = new(string) - **out = **in - } - if in.Message != nil { - in, out := &in.Message, &out.Message - *out = new(string) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new BiEngineReason. -func (in *BiEngineReason) DeepCopy() *BiEngineReason { - if in == nil { - return nil - } - out := new(BiEngineReason) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *BiEngineStatistics) DeepCopyInto(out *BiEngineStatistics) { - *out = *in - if in.BiEngineMode != nil { - in, out := &in.BiEngineMode, &out.BiEngineMode - *out = new(string) - **out = **in - } - if in.AccelerationMode != nil { - in, out := &in.AccelerationMode, &out.AccelerationMode - *out = new(string) - **out = **in - } - if in.BiEngineReasons != nil { - in, out := &in.BiEngineReasons, &out.BiEngineReasons - *out = make([]BiEngineReason, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new BiEngineStatistics. -func (in *BiEngineStatistics) DeepCopy() *BiEngineStatistics { - if in == nil { - return nil - } - out := new(BiEngineStatistics) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *BigLakeConfiguration) DeepCopyInto(out *BigLakeConfiguration) { - *out = *in - if in.ConnectionID != nil { - in, out := &in.ConnectionID, &out.ConnectionID - *out = new(string) - **out = **in - } - if in.StorageUri != nil { - in, out := &in.StorageUri, &out.StorageUri - *out = new(string) - **out = **in - } - if in.FileFormat != nil { - in, out := &in.FileFormat, &out.FileFormat - *out = new(string) - **out = **in - } - if in.TableFormat != nil { - in, out := &in.TableFormat, &out.TableFormat - *out = new(string) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new BigLakeConfiguration. -func (in *BigLakeConfiguration) DeepCopy() *BigLakeConfiguration { - if in == nil { - return nil - } - out := new(BigLakeConfiguration) - in.DeepCopyInto(out) - return out -} - // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. func (in *BigQueryDataset) DeepCopyInto(out *BigQueryDataset) { *out = *in @@ -295,6 +158,41 @@ func (in *BigQueryDatasetObservedState) DeepCopy() *BigQueryDatasetObservedState return out } +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *BigQueryDatasetParent) DeepCopyInto(out *BigQueryDatasetParent) { + *out = *in +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new BigQueryDatasetParent. +func (in *BigQueryDatasetParent) DeepCopy() *BigQueryDatasetParent { + if in == nil { + return nil + } + out := new(BigQueryDatasetParent) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *BigQueryDatasetRef) DeepCopyInto(out *BigQueryDatasetRef) { + *out = *in + if in.parent != nil { + in, out := &in.parent, &out.parent + *out = new(BigQueryDatasetParent) + **out = **in + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new BigQueryDatasetRef. +func (in *BigQueryDatasetRef) DeepCopy() *BigQueryDatasetRef { + if in == nil { + return nil + } + out := new(BigQueryDatasetRef) + in.DeepCopyInto(out) + return out +} + // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. func (in *BigQueryDatasetSpec) DeepCopyInto(out *BigQueryDatasetSpec) { *out = *in @@ -395,6 +293,11 @@ func (in *BigQueryDatasetStatus) DeepCopyInto(out *BigQueryDatasetStatus) { *out = new(string) **out = **in } + if in.ExternalRef != nil { + in, out := &in.ExternalRef, &out.ExternalRef + *out = new(string) + **out = **in + } if in.LastModifiedTime != nil { in, out := &in.LastModifiedTime, &out.LastModifiedTime *out = new(int64) @@ -423,7238 +326,209 @@ func (in *BigQueryDatasetStatus) DeepCopy() *BigQueryDatasetStatus { } // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *BigtableColumn) DeepCopyInto(out *BigtableColumn) { +func (in *BoolValue) DeepCopyInto(out *BoolValue) { *out = *in - if in.QualifierEncoded != nil { - in, out := &in.QualifierEncoded, &out.QualifierEncoded - *out = new(byte) - **out = **in - } - if in.QualifierString != nil { - in, out := &in.QualifierString, &out.QualifierString - *out = new(string) - **out = **in - } - if in.FieldName != nil { - in, out := &in.FieldName, &out.FieldName - *out = new(string) - **out = **in - } - if in.Type != nil { - in, out := &in.Type, &out.Type - *out = new(string) - **out = **in - } - if in.Encoding != nil { - in, out := &in.Encoding, &out.Encoding - *out = new(string) - **out = **in - } - if in.OnlyReadLatest != nil { - in, out := &in.OnlyReadLatest, &out.OnlyReadLatest + if in.Value != nil { + in, out := &in.Value, &out.Value *out = new(bool) **out = **in } } -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new BigtableColumn. -func (in *BigtableColumn) DeepCopy() *BigtableColumn { +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new BoolValue. +func (in *BoolValue) DeepCopy() *BoolValue { if in == nil { return nil } - out := new(BigtableColumn) + out := new(BoolValue) in.DeepCopyInto(out) return out } // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *BigtableColumnFamily) DeepCopyInto(out *BigtableColumnFamily) { +func (in *DatasetAccessEntry) DeepCopyInto(out *DatasetAccessEntry) { *out = *in - if in.FamilyID != nil { - in, out := &in.FamilyID, &out.FamilyID - *out = new(string) - **out = **in - } - if in.Type != nil { - in, out := &in.Type, &out.Type - *out = new(string) - **out = **in - } - if in.Encoding != nil { - in, out := &in.Encoding, &out.Encoding - *out = new(string) - **out = **in - } - if in.Columns != nil { - in, out := &in.Columns, &out.Columns - *out = make([]BigtableColumn, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } + if in.Dataset != nil { + in, out := &in.Dataset, &out.Dataset + *out = new(DatasetReference) + (*in).DeepCopyInto(*out) } - if in.OnlyReadLatest != nil { - in, out := &in.OnlyReadLatest, &out.OnlyReadLatest - *out = new(bool) - **out = **in + if in.TargetTypes != nil { + in, out := &in.TargetTypes, &out.TargetTypes + *out = make([]string, len(*in)) + copy(*out, *in) } } -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new BigtableColumnFamily. -func (in *BigtableColumnFamily) DeepCopy() *BigtableColumnFamily { +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetAccessEntry. +func (in *DatasetAccessEntry) DeepCopy() *DatasetAccessEntry { if in == nil { return nil } - out := new(BigtableColumnFamily) + out := new(DatasetAccessEntry) in.DeepCopyInto(out) return out } // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *BigtableOptions) DeepCopyInto(out *BigtableOptions) { +func (in *DatasetReference) DeepCopyInto(out *DatasetReference) { *out = *in - if in.ColumnFamilies != nil { - in, out := &in.ColumnFamilies, &out.ColumnFamilies - *out = make([]BigtableColumnFamily, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.IgnoreUnspecifiedColumnFamilies != nil { - in, out := &in.IgnoreUnspecifiedColumnFamilies, &out.IgnoreUnspecifiedColumnFamilies - *out = new(bool) - **out = **in - } - if in.ReadRowkeyAsString != nil { - in, out := &in.ReadRowkeyAsString, &out.ReadRowkeyAsString - *out = new(bool) + if in.DatasetId != nil { + in, out := &in.DatasetId, &out.DatasetId + *out = new(string) **out = **in } - if in.OutputColumnFamiliesAsJson != nil { - in, out := &in.OutputColumnFamiliesAsJson, &out.OutputColumnFamiliesAsJson - *out = new(bool) + if in.ProjectId != nil { + in, out := &in.ProjectId, &out.ProjectId + *out = new(string) **out = **in } } -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new BigtableOptions. -func (in *BigtableOptions) DeepCopy() *BigtableOptions { +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetReference. +func (in *DatasetReference) DeepCopy() *DatasetReference { if in == nil { return nil } - out := new(BigtableOptions) + out := new(DatasetReference) in.DeepCopyInto(out) return out } // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *CloneDefinition) DeepCopyInto(out *CloneDefinition) { +func (in *EncryptionConfiguration) DeepCopyInto(out *EncryptionConfiguration) { *out = *in - if in.BaseTableReference != nil { - in, out := &in.BaseTableReference, &out.BaseTableReference - *out = new(TableReference) - (*in).DeepCopyInto(*out) - } - if in.CloneTime != nil { - in, out := &in.CloneTime, &out.CloneTime - *out = new(string) + if in.KmsKeyRef != nil { + in, out := &in.KmsKeyRef, &out.KmsKeyRef + *out = new(refsv1beta1.KMSCryptoKeyRef) **out = **in } } -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new CloneDefinition. -func (in *CloneDefinition) DeepCopy() *CloneDefinition { +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new EncryptionConfiguration. +func (in *EncryptionConfiguration) DeepCopy() *EncryptionConfiguration { if in == nil { return nil } - out := new(CloneDefinition) + out := new(EncryptionConfiguration) in.DeepCopyInto(out) return out } // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Clustering) DeepCopyInto(out *Clustering) { +func (in *ExternalCatalogDatasetOptions) DeepCopyInto(out *ExternalCatalogDatasetOptions) { *out = *in - if in.Fields != nil { - in, out := &in.Fields, &out.Fields - *out = make([]string, len(*in)) - copy(*out, *in) + if in.Parameters != nil { + in, out := &in.Parameters, &out.Parameters + *out = make(map[string]string, len(*in)) + for key, val := range *in { + (*out)[key] = val + } + } + if in.DefaultStorageLocationUri != nil { + in, out := &in.DefaultStorageLocationUri, &out.DefaultStorageLocationUri + *out = new(string) + **out = **in } } -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Clustering. 
-func (in *Clustering) DeepCopy() *Clustering { +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ExternalCatalogDatasetOptions. +func (in *ExternalCatalogDatasetOptions) DeepCopy() *ExternalCatalogDatasetOptions { if in == nil { return nil } - out := new(Clustering) + out := new(ExternalCatalogDatasetOptions) in.DeepCopyInto(out) return out } // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *ColumnReference) DeepCopyInto(out *ColumnReference) { +func (in *ExternalDatasetReference) DeepCopyInto(out *ExternalDatasetReference) { *out = *in - if in.ReferencingColumn != nil { - in, out := &in.ReferencingColumn, &out.ReferencingColumn + if in.ExternalSource != nil { + in, out := &in.ExternalSource, &out.ExternalSource *out = new(string) **out = **in } - if in.ReferencedColumn != nil { - in, out := &in.ReferencedColumn, &out.ReferencedColumn + if in.Connection != nil { + in, out := &in.Connection, &out.Connection *out = new(string) **out = **in } } -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ColumnReference. -func (in *ColumnReference) DeepCopy() *ColumnReference { +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ExternalDatasetReference. +func (in *ExternalDatasetReference) DeepCopy() *ExternalDatasetReference { if in == nil { return nil } - out := new(ColumnReference) + out := new(ExternalDatasetReference) in.DeepCopyInto(out) return out } // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *ConnectionProperty) DeepCopyInto(out *ConnectionProperty) { +func (in *GcpTag) DeepCopyInto(out *GcpTag) { *out = *in - if in.Key != nil { - in, out := &in.Key, &out.Key + if in.TagKey != nil { + in, out := &in.TagKey, &out.TagKey *out = new(string) **out = **in } - if in.Value != nil { - in, out := &in.Value, &out.Value + if in.TagValue != nil { + in, out := &in.TagValue, &out.TagValue *out = new(string) **out = **in } } -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ConnectionProperty. -func (in *ConnectionProperty) DeepCopy() *ConnectionProperty { +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new GcpTag. +func (in *GcpTag) DeepCopy() *GcpTag { if in == nil { return nil } - out := new(ConnectionProperty) + out := new(GcpTag) in.DeepCopyInto(out) return out } // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *CopyJobStatistics) DeepCopyInto(out *CopyJobStatistics) { +func (in *LinkedDatasetMetadata) DeepCopyInto(out *LinkedDatasetMetadata) { *out = *in - if in.CopiedRows != nil { - in, out := &in.CopiedRows, &out.CopiedRows - *out = new(int64) - **out = **in - } - if in.CopiedLogicalBytes != nil { - in, out := &in.CopiedLogicalBytes, &out.CopiedLogicalBytes - *out = new(int64) - **out = **in - } } -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new CopyJobStatistics. -func (in *CopyJobStatistics) DeepCopy() *CopyJobStatistics { +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LinkedDatasetMetadata. 
+func (in *LinkedDatasetMetadata) DeepCopy() *LinkedDatasetMetadata { if in == nil { return nil } - out := new(CopyJobStatistics) + out := new(LinkedDatasetMetadata) in.DeepCopyInto(out) return out } // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *CsvOptions) DeepCopyInto(out *CsvOptions) { +func (in *LinkedDatasetSource) DeepCopyInto(out *LinkedDatasetSource) { *out = *in - if in.FieldDelimiter != nil { - in, out := &in.FieldDelimiter, &out.FieldDelimiter - *out = new(string) - **out = **in + if in.SourceDataset != nil { + in, out := &in.SourceDataset, &out.SourceDataset + *out = new(DatasetReference) + (*in).DeepCopyInto(*out) } - if in.SkipLeadingRows != nil { - in, out := &in.SkipLeadingRows, &out.SkipLeadingRows - *out = new(int64) - **out = **in +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LinkedDatasetSource. +func (in *LinkedDatasetSource) DeepCopy() *LinkedDatasetSource { + if in == nil { + return nil } - if in.Quote != nil { - in, out := &in.Quote, &out.Quote - *out = new(string) - **out = **in - } - if in.AllowQuotedNewlines != nil { - in, out := &in.AllowQuotedNewlines, &out.AllowQuotedNewlines - *out = new(bool) - **out = **in - } - if in.AllowJaggedRows != nil { - in, out := &in.AllowJaggedRows, &out.AllowJaggedRows - *out = new(bool) - **out = **in - } - if in.Encoding != nil { - in, out := &in.Encoding, &out.Encoding - *out = new(string) - **out = **in - } - if in.PreserveAsciiControlCharacters != nil { - in, out := &in.PreserveAsciiControlCharacters, &out.PreserveAsciiControlCharacters - *out = new(bool) - **out = **in - } - if in.NullMarker != nil { - in, out := &in.NullMarker, &out.NullMarker - *out = new(string) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new CsvOptions. -func (in *CsvOptions) DeepCopy() *CsvOptions { - if in == nil { - return nil - } - out := new(CsvOptions) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *DataFormatOptions) DeepCopyInto(out *DataFormatOptions) { - *out = *in - if in.UseInt64Timestamp != nil { - in, out := &in.UseInt64Timestamp, &out.UseInt64Timestamp - *out = new(bool) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataFormatOptions. -func (in *DataFormatOptions) DeepCopy() *DataFormatOptions { - if in == nil { - return nil - } - out := new(DataFormatOptions) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *DataMaskingStatistics) DeepCopyInto(out *DataMaskingStatistics) { - *out = *in - if in.DataMaskingApplied != nil { - in, out := &in.DataMaskingApplied, &out.DataMaskingApplied - *out = new(bool) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataMaskingStatistics. -func (in *DataMaskingStatistics) DeepCopy() *DataMaskingStatistics { - if in == nil { - return nil - } - out := new(DataMaskingStatistics) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *DataPolicyOption) DeepCopyInto(out *DataPolicyOption) { - *out = *in - if in.Name != nil { - in, out := &in.Name, &out.Name - *out = new(string) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataPolicyOption. -func (in *DataPolicyOption) DeepCopy() *DataPolicyOption { - if in == nil { - return nil - } - out := new(DataPolicyOption) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Dataset) DeepCopyInto(out *Dataset) { - *out = *in - if in.Kind != nil { - in, out := &in.Kind, &out.Kind - *out = new(string) - **out = **in - } - if in.Etag != nil { - in, out := &in.Etag, &out.Etag - *out = new(string) - **out = **in - } - if in.ID != nil { - in, out := &in.ID, &out.ID - *out = new(string) - **out = **in - } - if in.SelfLink != nil { - in, out := &in.SelfLink, &out.SelfLink - *out = new(string) - **out = **in - } - if in.DatasetReference != nil { - in, out := &in.DatasetReference, &out.DatasetReference - *out = new(DatasetReference) - (*in).DeepCopyInto(*out) - } - if in.FriendlyName != nil { - in, out := &in.FriendlyName, &out.FriendlyName - *out = new(string) - **out = **in - } - if in.Description != nil { - in, out := &in.Description, &out.Description - *out = new(string) - **out = **in - } - if in.DefaultTableExpirationMs != nil { - in, out := &in.DefaultTableExpirationMs, &out.DefaultTableExpirationMs - *out = new(int64) - **out = **in - } - if in.DefaultPartitionExpirationMs != nil { - in, out := &in.DefaultPartitionExpirationMs, &out.DefaultPartitionExpirationMs - *out = new(int64) - **out = **in - } - if in.Labels != nil { - in, out := &in.Labels, &out.Labels - *out = make(map[string]string, len(*in)) - for key, val := range *in { - (*out)[key] = val - } - } - if in.Access != nil { - in, out := &in.Access, &out.Access - *out = make([]Access, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.CreationTime != nil { - in, out := &in.CreationTime, &out.CreationTime - *out = new(int64) - **out = **in - } - if in.LastModifiedTime != nil { - in, out := &in.LastModifiedTime, &out.LastModifiedTime - *out = new(int64) - **out = **in - } - if in.Location != nil { - in, out := &in.Location, &out.Location - *out = new(string) - **out = **in - } - if in.DefaultEncryptionConfiguration != nil { - in, out := &in.DefaultEncryptionConfiguration, &out.DefaultEncryptionConfiguration - *out = new(EncryptionConfiguration) - (*in).DeepCopyInto(*out) - } - if in.SatisfiesPzs != nil { - in, out := &in.SatisfiesPzs, &out.SatisfiesPzs - *out = new(bool) - **out = **in - } - if in.SatisfiesPzi != nil { - in, out := &in.SatisfiesPzi, &out.SatisfiesPzi - *out = new(bool) - **out = **in - } - if in.Type != nil { - in, out := &in.Type, &out.Type - *out = new(string) - **out = **in - } - if in.LinkedDatasetSource != nil { - in, out := &in.LinkedDatasetSource, &out.LinkedDatasetSource - *out = new(LinkedDatasetSource) - (*in).DeepCopyInto(*out) - } - if in.LinkedDatasetMetadata != nil { - in, out := &in.LinkedDatasetMetadata, &out.LinkedDatasetMetadata - *out = new(LinkedDatasetMetadata) - **out = **in - } - if in.ExternalDatasetReference != nil { - in, out := &in.ExternalDatasetReference, &out.ExternalDatasetReference - *out = new(ExternalDatasetReference) - (*in).DeepCopyInto(*out) - } - if in.ExternalCatalogDatasetOptions != nil { - in, out := &in.ExternalCatalogDatasetOptions, 
&out.ExternalCatalogDatasetOptions - *out = new(ExternalCatalogDatasetOptions) - (*in).DeepCopyInto(*out) - } - if in.IsCaseInsensitive != nil { - in, out := &in.IsCaseInsensitive, &out.IsCaseInsensitive - *out = new(bool) - **out = **in - } - if in.DefaultCollation != nil { - in, out := &in.DefaultCollation, &out.DefaultCollation - *out = new(string) - **out = **in - } - if in.DefaultRoundingMode != nil { - in, out := &in.DefaultRoundingMode, &out.DefaultRoundingMode - *out = new(string) - **out = **in - } - if in.MaxTimeTravelHours != nil { - in, out := &in.MaxTimeTravelHours, &out.MaxTimeTravelHours - *out = new(int64) - **out = **in - } - if in.Tags != nil { - in, out := &in.Tags, &out.Tags - *out = make([]GcpTag, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.StorageBillingModel != nil { - in, out := &in.StorageBillingModel, &out.StorageBillingModel - *out = new(string) - **out = **in - } - if in.Restrictions != nil { - in, out := &in.Restrictions, &out.Restrictions - *out = new(RestrictionConfig) - (*in).DeepCopyInto(*out) - } - if in.ResourceTags != nil { - in, out := &in.ResourceTags, &out.ResourceTags - *out = make(map[string]string, len(*in)) - for key, val := range *in { - (*out)[key] = val - } - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Dataset. -func (in *Dataset) DeepCopy() *Dataset { - if in == nil { - return nil - } - out := new(Dataset) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *DatasetAccessEntry) DeepCopyInto(out *DatasetAccessEntry) { - *out = *in - if in.Dataset != nil { - in, out := &in.Dataset, &out.Dataset - *out = new(DatasetReference) - (*in).DeepCopyInto(*out) - } - if in.TargetTypes != nil { - in, out := &in.TargetTypes, &out.TargetTypes - *out = make([]string, len(*in)) - copy(*out, *in) - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetAccessEntry. -func (in *DatasetAccessEntry) DeepCopy() *DatasetAccessEntry { - if in == nil { - return nil - } - out := new(DatasetAccessEntry) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *DatasetList) DeepCopyInto(out *DatasetList) { - *out = *in - if in.Kind != nil { - in, out := &in.Kind, &out.Kind - *out = new(string) - **out = **in - } - if in.Etag != nil { - in, out := &in.Etag, &out.Etag - *out = new(string) - **out = **in - } - if in.NextPageToken != nil { - in, out := &in.NextPageToken, &out.NextPageToken - *out = new(string) - **out = **in - } - if in.Datasets != nil { - in, out := &in.Datasets, &out.Datasets - *out = make([]ListFormatDataset, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.Unreachable != nil { - in, out := &in.Unreachable, &out.Unreachable - *out = make([]string, len(*in)) - copy(*out, *in) - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetList. -func (in *DatasetList) DeepCopy() *DatasetList { - if in == nil { - return nil - } - out := new(DatasetList) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *DatasetReference) DeepCopyInto(out *DatasetReference) { - *out = *in - if in.DatasetId != nil { - in, out := &in.DatasetId, &out.DatasetId - *out = new(string) - **out = **in - } - if in.ProjectId != nil { - in, out := &in.ProjectId, &out.ProjectId - *out = new(string) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetReference. -func (in *DatasetReference) DeepCopy() *DatasetReference { - if in == nil { - return nil - } - out := new(DatasetReference) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *DestinationTableProperties) DeepCopyInto(out *DestinationTableProperties) { - *out = *in - if in.FriendlyName != nil { - in, out := &in.FriendlyName, &out.FriendlyName - *out = new(string) - **out = **in - } - if in.Description != nil { - in, out := &in.Description, &out.Description - *out = new(string) - **out = **in - } - if in.Labels != nil { - in, out := &in.Labels, &out.Labels - *out = make(map[string]string, len(*in)) - for key, val := range *in { - (*out)[key] = val - } - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DestinationTableProperties. -func (in *DestinationTableProperties) DeepCopy() *DestinationTableProperties { - if in == nil { - return nil - } - out := new(DestinationTableProperties) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *DifferentialPrivacyPolicy) DeepCopyInto(out *DifferentialPrivacyPolicy) { - *out = *in - if in.MaxEpsilonPerQuery != nil { - in, out := &in.MaxEpsilonPerQuery, &out.MaxEpsilonPerQuery - *out = new(float64) - **out = **in - } - if in.DeltaPerQuery != nil { - in, out := &in.DeltaPerQuery, &out.DeltaPerQuery - *out = new(float64) - **out = **in - } - if in.MaxGroupsContributed != nil { - in, out := &in.MaxGroupsContributed, &out.MaxGroupsContributed - *out = new(int64) - **out = **in - } - if in.PrivacyUnitColumn != nil { - in, out := &in.PrivacyUnitColumn, &out.PrivacyUnitColumn - *out = new(string) - **out = **in - } - if in.EpsilonBudget != nil { - in, out := &in.EpsilonBudget, &out.EpsilonBudget - *out = new(float64) - **out = **in - } - if in.DeltaBudget != nil { - in, out := &in.DeltaBudget, &out.DeltaBudget - *out = new(float64) - **out = **in - } - if in.EpsilonBudgetRemaining != nil { - in, out := &in.EpsilonBudgetRemaining, &out.EpsilonBudgetRemaining - *out = new(float64) - **out = **in - } - if in.DeltaBudgetRemaining != nil { - in, out := &in.DeltaBudgetRemaining, &out.DeltaBudgetRemaining - *out = new(float64) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DifferentialPrivacyPolicy. -func (in *DifferentialPrivacyPolicy) DeepCopy() *DifferentialPrivacyPolicy { - if in == nil { - return nil - } - out := new(DifferentialPrivacyPolicy) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *DmlStats) DeepCopyInto(out *DmlStats) { - *out = *in - if in.InsertedRowCount != nil { - in, out := &in.InsertedRowCount, &out.InsertedRowCount - *out = new(int64) - **out = **in - } - if in.DeletedRowCount != nil { - in, out := &in.DeletedRowCount, &out.DeletedRowCount - *out = new(int64) - **out = **in - } - if in.UpdatedRowCount != nil { - in, out := &in.UpdatedRowCount, &out.UpdatedRowCount - *out = new(int64) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DmlStats. -func (in *DmlStats) DeepCopy() *DmlStats { - if in == nil { - return nil - } - out := new(DmlStats) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *EncryptionConfiguration) DeepCopyInto(out *EncryptionConfiguration) { - *out = *in - if in.KmsKeyRef != nil { - in, out := &in.KmsKeyRef, &out.KmsKeyRef - *out = new(refsv1beta1.KMSCryptoKeyRef) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new EncryptionConfiguration. -func (in *EncryptionConfiguration) DeepCopy() *EncryptionConfiguration { - if in == nil { - return nil - } - out := new(EncryptionConfiguration) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *ErrorProto) DeepCopyInto(out *ErrorProto) { - *out = *in - if in.Reason != nil { - in, out := &in.Reason, &out.Reason - *out = new(string) - **out = **in - } - if in.Location != nil { - in, out := &in.Location, &out.Location - *out = new(string) - **out = **in - } - if in.DebugInfo != nil { - in, out := &in.DebugInfo, &out.DebugInfo - *out = new(string) - **out = **in - } - if in.Message != nil { - in, out := &in.Message, &out.Message - *out = new(string) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ErrorProto. -func (in *ErrorProto) DeepCopy() *ErrorProto { - if in == nil { - return nil - } - out := new(ErrorProto) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *ExplainQueryStage) DeepCopyInto(out *ExplainQueryStage) { - *out = *in - if in.Name != nil { - in, out := &in.Name, &out.Name - *out = new(string) - **out = **in - } - if in.ID != nil { - in, out := &in.ID, &out.ID - *out = new(int64) - **out = **in - } - if in.StartMs != nil { - in, out := &in.StartMs, &out.StartMs - *out = new(int64) - **out = **in - } - if in.EndMs != nil { - in, out := &in.EndMs, &out.EndMs - *out = new(int64) - **out = **in - } - if in.InputStages != nil { - in, out := &in.InputStages, &out.InputStages - *out = make([]int64, len(*in)) - copy(*out, *in) - } - if in.WaitRatioAvg != nil { - in, out := &in.WaitRatioAvg, &out.WaitRatioAvg - *out = new(float64) - **out = **in - } - if in.WaitMsAvg != nil { - in, out := &in.WaitMsAvg, &out.WaitMsAvg - *out = new(int64) - **out = **in - } - if in.WaitRatioMax != nil { - in, out := &in.WaitRatioMax, &out.WaitRatioMax - *out = new(float64) - **out = **in - } - if in.WaitMsMax != nil { - in, out := &in.WaitMsMax, &out.WaitMsMax - *out = new(int64) - **out = **in - } - if in.ReadRatioAvg != nil { - in, out := &in.ReadRatioAvg, &out.ReadRatioAvg - *out = new(float64) - **out = **in - } - if in.ReadMsAvg != nil { - in, out := &in.ReadMsAvg, &out.ReadMsAvg - *out = new(int64) - **out = **in - } - if in.ReadRatioMax != nil { - in, out := &in.ReadRatioMax, &out.ReadRatioMax - *out = new(float64) - **out = **in - } - if in.ReadMsMax != nil { - in, out := &in.ReadMsMax, &out.ReadMsMax - *out = new(int64) - **out = **in - } - if in.ComputeRatioAvg != nil { - in, out := &in.ComputeRatioAvg, &out.ComputeRatioAvg - *out = new(float64) - **out = **in - } - if in.ComputeMsAvg != nil { - in, out := &in.ComputeMsAvg, &out.ComputeMsAvg - *out = new(int64) - **out = **in - } - if in.ComputeRatioMax != nil { - in, out := &in.ComputeRatioMax, &out.ComputeRatioMax - *out = new(float64) - **out = **in - } - if in.ComputeMsMax != nil { - in, out := &in.ComputeMsMax, &out.ComputeMsMax - *out = new(int64) - **out = **in - } - if in.WriteRatioAvg != nil { - in, out := &in.WriteRatioAvg, &out.WriteRatioAvg - *out = new(float64) - **out = **in - } - if in.WriteMsAvg != nil { - in, out := &in.WriteMsAvg, &out.WriteMsAvg - *out = new(int64) - **out = **in - } - if in.WriteRatioMax != nil { - in, out := &in.WriteRatioMax, &out.WriteRatioMax - *out = new(float64) - **out = **in - } - if in.WriteMsMax != nil { - in, out := &in.WriteMsMax, &out.WriteMsMax - *out = new(int64) - **out = **in - } - if in.ShuffleOutputBytes != nil { - in, out := &in.ShuffleOutputBytes, &out.ShuffleOutputBytes - *out = new(int64) - **out = **in - } - if in.ShuffleOutputBytesSpilled != nil { - in, out := &in.ShuffleOutputBytesSpilled, &out.ShuffleOutputBytesSpilled - *out = new(int64) - **out = **in - } - if in.RecordsRead != nil { - in, out := &in.RecordsRead, &out.RecordsRead - *out = new(int64) - **out = **in - } - if in.RecordsWritten != nil { - in, out := &in.RecordsWritten, &out.RecordsWritten - *out = new(int64) - **out = **in - } - if in.ParallelInputs != nil { - in, out := &in.ParallelInputs, &out.ParallelInputs - *out = new(int64) - **out = **in - } - if in.CompletedParallelInputs != nil { - in, out := &in.CompletedParallelInputs, &out.CompletedParallelInputs - *out = new(int64) - **out = **in - } - if in.Status != nil { - in, out := &in.Status, &out.Status - *out = new(string) - **out = **in - } - if in.Steps != nil { - in, out := &in.Steps, &out.Steps - *out = make([]ExplainQueryStep, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - 
if in.SlotMs != nil { - in, out := &in.SlotMs, &out.SlotMs - *out = new(int64) - **out = **in - } - if in.ComputeMode != nil { - in, out := &in.ComputeMode, &out.ComputeMode - *out = new(string) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ExplainQueryStage. -func (in *ExplainQueryStage) DeepCopy() *ExplainQueryStage { - if in == nil { - return nil - } - out := new(ExplainQueryStage) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *ExplainQueryStep) DeepCopyInto(out *ExplainQueryStep) { - *out = *in - if in.Kind != nil { - in, out := &in.Kind, &out.Kind - *out = new(string) - **out = **in - } - if in.Substeps != nil { - in, out := &in.Substeps, &out.Substeps - *out = make([]string, len(*in)) - copy(*out, *in) - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ExplainQueryStep. -func (in *ExplainQueryStep) DeepCopy() *ExplainQueryStep { - if in == nil { - return nil - } - out := new(ExplainQueryStep) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *ExportDataStatistics) DeepCopyInto(out *ExportDataStatistics) { - *out = *in - if in.FileCount != nil { - in, out := &in.FileCount, &out.FileCount - *out = new(int64) - **out = **in - } - if in.RowCount != nil { - in, out := &in.RowCount, &out.RowCount - *out = new(int64) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ExportDataStatistics. -func (in *ExportDataStatistics) DeepCopy() *ExportDataStatistics { - if in == nil { - return nil - } - out := new(ExportDataStatistics) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *ExternalCatalogDatasetOptions) DeepCopyInto(out *ExternalCatalogDatasetOptions) { - *out = *in - if in.Parameters != nil { - in, out := &in.Parameters, &out.Parameters - *out = make(map[string]string, len(*in)) - for key, val := range *in { - (*out)[key] = val - } - } - if in.DefaultStorageLocationUri != nil { - in, out := &in.DefaultStorageLocationUri, &out.DefaultStorageLocationUri - *out = new(string) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ExternalCatalogDatasetOptions. -func (in *ExternalCatalogDatasetOptions) DeepCopy() *ExternalCatalogDatasetOptions { - if in == nil { - return nil - } - out := new(ExternalCatalogDatasetOptions) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *ExternalCatalogTableOptions) DeepCopyInto(out *ExternalCatalogTableOptions) { - *out = *in - if in.Parameters != nil { - in, out := &in.Parameters, &out.Parameters - *out = make(map[string]string, len(*in)) - for key, val := range *in { - (*out)[key] = val - } - } - if in.StorageDescriptor != nil { - in, out := &in.StorageDescriptor, &out.StorageDescriptor - *out = new(StorageDescriptor) - (*in).DeepCopyInto(*out) - } - if in.ConnectionID != nil { - in, out := &in.ConnectionID, &out.ConnectionID - *out = new(string) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ExternalCatalogTableOptions. -func (in *ExternalCatalogTableOptions) DeepCopy() *ExternalCatalogTableOptions { - if in == nil { - return nil - } - out := new(ExternalCatalogTableOptions) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *ExternalDataConfiguration) DeepCopyInto(out *ExternalDataConfiguration) { - *out = *in - if in.SourceUris != nil { - in, out := &in.SourceUris, &out.SourceUris - *out = make([]string, len(*in)) - copy(*out, *in) - } - if in.FileSetSpecType != nil { - in, out := &in.FileSetSpecType, &out.FileSetSpecType - *out = new(string) - **out = **in - } - if in.Schema != nil { - in, out := &in.Schema, &out.Schema - *out = new(TableSchema) - (*in).DeepCopyInto(*out) - } - if in.SourceFormat != nil { - in, out := &in.SourceFormat, &out.SourceFormat - *out = new(string) - **out = **in - } - if in.MaxBadRecords != nil { - in, out := &in.MaxBadRecords, &out.MaxBadRecords - *out = new(int32) - **out = **in - } - if in.Autodetect != nil { - in, out := &in.Autodetect, &out.Autodetect - *out = new(bool) - **out = **in - } - if in.IgnoreUnknownValues != nil { - in, out := &in.IgnoreUnknownValues, &out.IgnoreUnknownValues - *out = new(bool) - **out = **in - } - if in.Compression != nil { - in, out := &in.Compression, &out.Compression - *out = new(string) - **out = **in - } - if in.CsvOptions != nil { - in, out := &in.CsvOptions, &out.CsvOptions - *out = new(CsvOptions) - (*in).DeepCopyInto(*out) - } - if in.JsonOptions != nil { - in, out := &in.JsonOptions, &out.JsonOptions - *out = new(JsonOptions) - (*in).DeepCopyInto(*out) - } - if in.BigtableOptions != nil { - in, out := &in.BigtableOptions, &out.BigtableOptions - *out = new(BigtableOptions) - (*in).DeepCopyInto(*out) - } - if in.GoogleSheetsOptions != nil { - in, out := &in.GoogleSheetsOptions, &out.GoogleSheetsOptions - *out = new(GoogleSheetsOptions) - (*in).DeepCopyInto(*out) - } - if in.HivePartitioningOptions != nil { - in, out := &in.HivePartitioningOptions, &out.HivePartitioningOptions - *out = new(HivePartitioningOptions) - (*in).DeepCopyInto(*out) - } - if in.ConnectionID != nil { - in, out := &in.ConnectionID, &out.ConnectionID - *out = new(string) - **out = **in - } - if in.DecimalTargetTypes != nil { - in, out := &in.DecimalTargetTypes, &out.DecimalTargetTypes - *out = make([]string, len(*in)) - copy(*out, *in) - } - if in.AvroOptions != nil { - in, out := &in.AvroOptions, &out.AvroOptions - *out = new(AvroOptions) - (*in).DeepCopyInto(*out) - } - if in.JsonExtension != nil { - in, out := &in.JsonExtension, &out.JsonExtension - *out = new(string) - **out = **in - } - if in.ParquetOptions != nil { - in, out := &in.ParquetOptions, &out.ParquetOptions - *out = new(ParquetOptions) - (*in).DeepCopyInto(*out) - } - if in.ObjectMetadata != nil { - in, out := 
&in.ObjectMetadata, &out.ObjectMetadata - *out = new(string) - **out = **in - } - if in.ReferenceFileSchemaUri != nil { - in, out := &in.ReferenceFileSchemaUri, &out.ReferenceFileSchemaUri - *out = new(string) - **out = **in - } - if in.MetadataCacheMode != nil { - in, out := &in.MetadataCacheMode, &out.MetadataCacheMode - *out = new(string) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ExternalDataConfiguration. -func (in *ExternalDataConfiguration) DeepCopy() *ExternalDataConfiguration { - if in == nil { - return nil - } - out := new(ExternalDataConfiguration) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *ExternalDatasetReference) DeepCopyInto(out *ExternalDatasetReference) { - *out = *in - if in.ExternalSource != nil { - in, out := &in.ExternalSource, &out.ExternalSource - *out = new(string) - **out = **in - } - if in.Connection != nil { - in, out := &in.Connection, &out.Connection - *out = new(string) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ExternalDatasetReference. -func (in *ExternalDatasetReference) DeepCopy() *ExternalDatasetReference { - if in == nil { - return nil - } - out := new(ExternalDatasetReference) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *ExternalServiceCost) DeepCopyInto(out *ExternalServiceCost) { - *out = *in - if in.ExternalService != nil { - in, out := &in.ExternalService, &out.ExternalService - *out = new(string) - **out = **in - } - if in.BytesProcessed != nil { - in, out := &in.BytesProcessed, &out.BytesProcessed - *out = new(int64) - **out = **in - } - if in.BytesBilled != nil { - in, out := &in.BytesBilled, &out.BytesBilled - *out = new(int64) - **out = **in - } - if in.SlotMs != nil { - in, out := &in.SlotMs, &out.SlotMs - *out = new(int64) - **out = **in - } - if in.ReservedSlotCount != nil { - in, out := &in.ReservedSlotCount, &out.ReservedSlotCount - *out = new(int64) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ExternalServiceCost. -func (in *ExternalServiceCost) DeepCopy() *ExternalServiceCost { - if in == nil { - return nil - } - out := new(ExternalServiceCost) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *ForeignKey) DeepCopyInto(out *ForeignKey) { - *out = *in - if in.Name != nil { - in, out := &in.Name, &out.Name - *out = new(string) - **out = **in - } - if in.ReferencedTable != nil { - in, out := &in.ReferencedTable, &out.ReferencedTable - *out = new(TableReference) - (*in).DeepCopyInto(*out) - } - if in.ColumnReferences != nil { - in, out := &in.ColumnReferences, &out.ColumnReferences - *out = make([]ColumnReference, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ForeignKey. -func (in *ForeignKey) DeepCopy() *ForeignKey { - if in == nil { - return nil - } - out := new(ForeignKey) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *ForeignTypeInfo) DeepCopyInto(out *ForeignTypeInfo) { - *out = *in - if in.TypeSystem != nil { - in, out := &in.TypeSystem, &out.TypeSystem - *out = new(string) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ForeignTypeInfo. -func (in *ForeignTypeInfo) DeepCopy() *ForeignTypeInfo { - if in == nil { - return nil - } - out := new(ForeignTypeInfo) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *ForeignViewDefinition) DeepCopyInto(out *ForeignViewDefinition) { - *out = *in - if in.Query != nil { - in, out := &in.Query, &out.Query - *out = new(string) - **out = **in - } - if in.Dialect != nil { - in, out := &in.Dialect, &out.Dialect - *out = new(string) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ForeignViewDefinition. -func (in *ForeignViewDefinition) DeepCopy() *ForeignViewDefinition { - if in == nil { - return nil - } - out := new(ForeignViewDefinition) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *GcpTag) DeepCopyInto(out *GcpTag) { - *out = *in - if in.TagKey != nil { - in, out := &in.TagKey, &out.TagKey - *out = new(string) - **out = **in - } - if in.TagValue != nil { - in, out := &in.TagValue, &out.TagValue - *out = new(string) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new GcpTag. -func (in *GcpTag) DeepCopy() *GcpTag { - if in == nil { - return nil - } - out := new(GcpTag) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *GoogleSheetsOptions) DeepCopyInto(out *GoogleSheetsOptions) { - *out = *in - if in.SkipLeadingRows != nil { - in, out := &in.SkipLeadingRows, &out.SkipLeadingRows - *out = new(int64) - **out = **in - } - if in.Range != nil { - in, out := &in.Range, &out.Range - *out = new(string) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new GoogleSheetsOptions. -func (in *GoogleSheetsOptions) DeepCopy() *GoogleSheetsOptions { - if in == nil { - return nil - } - out := new(GoogleSheetsOptions) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *HighCardinalityJoin) DeepCopyInto(out *HighCardinalityJoin) { - *out = *in - if in.LeftRows != nil { - in, out := &in.LeftRows, &out.LeftRows - *out = new(int64) - **out = **in - } - if in.RightRows != nil { - in, out := &in.RightRows, &out.RightRows - *out = new(int64) - **out = **in - } - if in.OutputRows != nil { - in, out := &in.OutputRows, &out.OutputRows - *out = new(int64) - **out = **in - } - if in.StepIndex != nil { - in, out := &in.StepIndex, &out.StepIndex - *out = new(int32) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new HighCardinalityJoin. -func (in *HighCardinalityJoin) DeepCopy() *HighCardinalityJoin { - if in == nil { - return nil - } - out := new(HighCardinalityJoin) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *HivePartitioningOptions) DeepCopyInto(out *HivePartitioningOptions) { - *out = *in - if in.Mode != nil { - in, out := &in.Mode, &out.Mode - *out = new(string) - **out = **in - } - if in.SourceUriPrefix != nil { - in, out := &in.SourceUriPrefix, &out.SourceUriPrefix - *out = new(string) - **out = **in - } - if in.RequirePartitionFilter != nil { - in, out := &in.RequirePartitionFilter, &out.RequirePartitionFilter - *out = new(bool) - **out = **in - } - if in.Fields != nil { - in, out := &in.Fields, &out.Fields - *out = make([]string, len(*in)) - copy(*out, *in) - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new HivePartitioningOptions. -func (in *HivePartitioningOptions) DeepCopy() *HivePartitioningOptions { - if in == nil { - return nil - } - out := new(HivePartitioningOptions) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *IndexUnusedReason) DeepCopyInto(out *IndexUnusedReason) { - *out = *in - if in.Code != nil { - in, out := &in.Code, &out.Code - *out = new(string) - **out = **in - } - if in.Message != nil { - in, out := &in.Message, &out.Message - *out = new(string) - **out = **in - } - if in.BaseTable != nil { - in, out := &in.BaseTable, &out.BaseTable - *out = new(TableReference) - (*in).DeepCopyInto(*out) - } - if in.IndexName != nil { - in, out := &in.IndexName, &out.IndexName - *out = new(string) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new IndexUnusedReason. -func (in *IndexUnusedReason) DeepCopy() *IndexUnusedReason { - if in == nil { - return nil - } - out := new(IndexUnusedReason) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *InputDataChange) DeepCopyInto(out *InputDataChange) { - *out = *in - if in.RecordsReadDiffPercentage != nil { - in, out := &in.RecordsReadDiffPercentage, &out.RecordsReadDiffPercentage - *out = new(float64) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new InputDataChange. -func (in *InputDataChange) DeepCopy() *InputDataChange { - if in == nil { - return nil - } - out := new(InputDataChange) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *Job) DeepCopyInto(out *Job) { - *out = *in - if in.Kind != nil { - in, out := &in.Kind, &out.Kind - *out = new(string) - **out = **in - } - if in.Etag != nil { - in, out := &in.Etag, &out.Etag - *out = new(string) - **out = **in - } - if in.ID != nil { - in, out := &in.ID, &out.ID - *out = new(string) - **out = **in - } - if in.SelfLink != nil { - in, out := &in.SelfLink, &out.SelfLink - *out = new(string) - **out = **in - } - if in.UserEmail != nil { - in, out := &in.UserEmail, &out.UserEmail - *out = new(string) - **out = **in - } - if in.Configuration != nil { - in, out := &in.Configuration, &out.Configuration - *out = new(JobConfiguration) - (*in).DeepCopyInto(*out) - } - if in.JobReference != nil { - in, out := &in.JobReference, &out.JobReference - *out = new(JobReference) - (*in).DeepCopyInto(*out) - } - if in.Statistics != nil { - in, out := &in.Statistics, &out.Statistics - *out = new(JobStatistics) - (*in).DeepCopyInto(*out) - } - if in.Status != nil { - in, out := &in.Status, &out.Status - *out = new(JobStatus) - (*in).DeepCopyInto(*out) - } - if in.PrincipalSubject != nil { - in, out := &in.PrincipalSubject, &out.PrincipalSubject - *out = new(string) - **out = **in - } - if in.JobCreationReason != nil { - in, out := &in.JobCreationReason, &out.JobCreationReason - *out = new(JobCreationReason) - (*in).DeepCopyInto(*out) - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Job. -func (in *Job) DeepCopy() *Job { - if in == nil { - return nil - } - out := new(Job) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *JobConfiguration) DeepCopyInto(out *JobConfiguration) { - *out = *in - if in.JobType != nil { - in, out := &in.JobType, &out.JobType - *out = new(string) - **out = **in - } - if in.Query != nil { - in, out := &in.Query, &out.Query - *out = new(JobConfigurationQuery) - (*in).DeepCopyInto(*out) - } - if in.Load != nil { - in, out := &in.Load, &out.Load - *out = new(JobConfigurationLoad) - (*in).DeepCopyInto(*out) - } - if in.Copy != nil { - in, out := &in.Copy, &out.Copy - *out = new(JobConfigurationTableCopy) - (*in).DeepCopyInto(*out) - } - if in.Extract != nil { - in, out := &in.Extract, &out.Extract - *out = new(JobConfigurationExtract) - (*in).DeepCopyInto(*out) - } - if in.DryRun != nil { - in, out := &in.DryRun, &out.DryRun - *out = new(bool) - **out = **in - } - if in.JobTimeoutMs != nil { - in, out := &in.JobTimeoutMs, &out.JobTimeoutMs - *out = new(int64) - **out = **in - } - if in.Labels != nil { - in, out := &in.Labels, &out.Labels - *out = make(map[string]string, len(*in)) - for key, val := range *in { - (*out)[key] = val - } - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobConfiguration. -func (in *JobConfiguration) DeepCopy() *JobConfiguration { - if in == nil { - return nil - } - out := new(JobConfiguration) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *JobConfigurationExtract) DeepCopyInto(out *JobConfigurationExtract) { - *out = *in - if in.SourceTable != nil { - in, out := &in.SourceTable, &out.SourceTable - *out = new(TableReference) - (*in).DeepCopyInto(*out) - } - if in.SourceModel != nil { - in, out := &in.SourceModel, &out.SourceModel - *out = new(ModelReference) - (*in).DeepCopyInto(*out) - } - if in.DestinationUris != nil { - in, out := &in.DestinationUris, &out.DestinationUris - *out = make([]string, len(*in)) - copy(*out, *in) - } - if in.PrintHeader != nil { - in, out := &in.PrintHeader, &out.PrintHeader - *out = new(bool) - **out = **in - } - if in.FieldDelimiter != nil { - in, out := &in.FieldDelimiter, &out.FieldDelimiter - *out = new(string) - **out = **in - } - if in.DestinationFormat != nil { - in, out := &in.DestinationFormat, &out.DestinationFormat - *out = new(string) - **out = **in - } - if in.Compression != nil { - in, out := &in.Compression, &out.Compression - *out = new(string) - **out = **in - } - if in.UseAvroLogicalTypes != nil { - in, out := &in.UseAvroLogicalTypes, &out.UseAvroLogicalTypes - *out = new(bool) - **out = **in - } - if in.ModelExtractOptions != nil { - in, out := &in.ModelExtractOptions, &out.ModelExtractOptions - *out = new(JobConfigurationExtract_ModelExtractOptions) - (*in).DeepCopyInto(*out) - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobConfigurationExtract. -func (in *JobConfigurationExtract) DeepCopy() *JobConfigurationExtract { - if in == nil { - return nil - } - out := new(JobConfigurationExtract) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *JobConfigurationExtract_ModelExtractOptions) DeepCopyInto(out *JobConfigurationExtract_ModelExtractOptions) { - *out = *in - if in.TrialID != nil { - in, out := &in.TrialID, &out.TrialID - *out = new(int64) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobConfigurationExtract_ModelExtractOptions. -func (in *JobConfigurationExtract_ModelExtractOptions) DeepCopy() *JobConfigurationExtract_ModelExtractOptions { - if in == nil { - return nil - } - out := new(JobConfigurationExtract_ModelExtractOptions) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *JobConfigurationLoad) DeepCopyInto(out *JobConfigurationLoad) { - *out = *in - if in.SourceUris != nil { - in, out := &in.SourceUris, &out.SourceUris - *out = make([]string, len(*in)) - copy(*out, *in) - } - if in.FileSetSpecType != nil { - in, out := &in.FileSetSpecType, &out.FileSetSpecType - *out = new(string) - **out = **in - } - if in.Schema != nil { - in, out := &in.Schema, &out.Schema - *out = new(TableSchema) - (*in).DeepCopyInto(*out) - } - if in.DestinationTable != nil { - in, out := &in.DestinationTable, &out.DestinationTable - *out = new(TableReference) - (*in).DeepCopyInto(*out) - } - if in.DestinationTableProperties != nil { - in, out := &in.DestinationTableProperties, &out.DestinationTableProperties - *out = new(DestinationTableProperties) - (*in).DeepCopyInto(*out) - } - if in.CreateDisposition != nil { - in, out := &in.CreateDisposition, &out.CreateDisposition - *out = new(string) - **out = **in - } - if in.WriteDisposition != nil { - in, out := &in.WriteDisposition, &out.WriteDisposition - *out = new(string) - **out = **in - } - if in.NullMarker != nil { - in, out := &in.NullMarker, &out.NullMarker - *out = new(string) - **out = **in - } - if in.FieldDelimiter != nil { - in, out := &in.FieldDelimiter, &out.FieldDelimiter - *out = new(string) - **out = **in - } - if in.SkipLeadingRows != nil { - in, out := &in.SkipLeadingRows, &out.SkipLeadingRows - *out = new(int32) - **out = **in - } - if in.Encoding != nil { - in, out := &in.Encoding, &out.Encoding - *out = new(string) - **out = **in - } - if in.Quote != nil { - in, out := &in.Quote, &out.Quote - *out = new(string) - **out = **in - } - if in.MaxBadRecords != nil { - in, out := &in.MaxBadRecords, &out.MaxBadRecords - *out = new(int32) - **out = **in - } - if in.AllowQuotedNewlines != nil { - in, out := &in.AllowQuotedNewlines, &out.AllowQuotedNewlines - *out = new(bool) - **out = **in - } - if in.SourceFormat != nil { - in, out := &in.SourceFormat, &out.SourceFormat - *out = new(string) - **out = **in - } - if in.AllowJaggedRows != nil { - in, out := &in.AllowJaggedRows, &out.AllowJaggedRows - *out = new(bool) - **out = **in - } - if in.IgnoreUnknownValues != nil { - in, out := &in.IgnoreUnknownValues, &out.IgnoreUnknownValues - *out = new(bool) - **out = **in - } - if in.ProjectionFields != nil { - in, out := &in.ProjectionFields, &out.ProjectionFields - *out = make([]string, len(*in)) - copy(*out, *in) - } - if in.Autodetect != nil { - in, out := &in.Autodetect, &out.Autodetect - *out = new(bool) - **out = **in - } - if in.SchemaUpdateOptions != nil { - in, out := &in.SchemaUpdateOptions, &out.SchemaUpdateOptions - *out = make([]string, len(*in)) - copy(*out, *in) - } - if in.TimePartitioning != nil { - in, out := &in.TimePartitioning, &out.TimePartitioning - *out = new(TimePartitioning) - (*in).DeepCopyInto(*out) - } - if in.RangePartitioning != nil { - in, out := &in.RangePartitioning, &out.RangePartitioning - *out = new(RangePartitioning) - (*in).DeepCopyInto(*out) - } - if in.Clustering != nil { - in, out := &in.Clustering, &out.Clustering - *out = new(Clustering) - (*in).DeepCopyInto(*out) - } - if in.DestinationEncryptionConfiguration != nil { - in, out := &in.DestinationEncryptionConfiguration, &out.DestinationEncryptionConfiguration - *out = new(EncryptionConfiguration) - (*in).DeepCopyInto(*out) - } - if in.UseAvroLogicalTypes != nil { - in, out := &in.UseAvroLogicalTypes, &out.UseAvroLogicalTypes - *out = new(bool) - **out = **in - } - if in.ReferenceFileSchemaUri != nil { - in, out := 
&in.ReferenceFileSchemaUri, &out.ReferenceFileSchemaUri - *out = new(string) - **out = **in - } - if in.HivePartitioningOptions != nil { - in, out := &in.HivePartitioningOptions, &out.HivePartitioningOptions - *out = new(HivePartitioningOptions) - (*in).DeepCopyInto(*out) - } - if in.DecimalTargetTypes != nil { - in, out := &in.DecimalTargetTypes, &out.DecimalTargetTypes - *out = make([]string, len(*in)) - copy(*out, *in) - } - if in.JsonExtension != nil { - in, out := &in.JsonExtension, &out.JsonExtension - *out = new(string) - **out = **in - } - if in.ParquetOptions != nil { - in, out := &in.ParquetOptions, &out.ParquetOptions - *out = new(ParquetOptions) - (*in).DeepCopyInto(*out) - } - if in.PreserveAsciiControlCharacters != nil { - in, out := &in.PreserveAsciiControlCharacters, &out.PreserveAsciiControlCharacters - *out = new(bool) - **out = **in - } - if in.ConnectionProperties != nil { - in, out := &in.ConnectionProperties, &out.ConnectionProperties - *out = make([]ConnectionProperty, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.CreateSession != nil { - in, out := &in.CreateSession, &out.CreateSession - *out = new(bool) - **out = **in - } - if in.ColumnNameCharacterMap != nil { - in, out := &in.ColumnNameCharacterMap, &out.ColumnNameCharacterMap - *out = new(string) - **out = **in - } - if in.CopyFilesOnly != nil { - in, out := &in.CopyFilesOnly, &out.CopyFilesOnly - *out = new(bool) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobConfigurationLoad. -func (in *JobConfigurationLoad) DeepCopy() *JobConfigurationLoad { - if in == nil { - return nil - } - out := new(JobConfigurationLoad) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
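(Illustrative aside, assuming nothing beyond what the generated code above shows.) JobConfigurationLoad uses two different slice idioms: slices of plain strings (SourceUris, SchemaUpdateOptions, DecimalTargetTypes) are duplicated with copy, while slices of structs such as ConnectionProperties are copied element by element through DeepCopyInto, because copy alone would leave any pointers held inside the elements aliased to the source. A small self-contained sketch of the difference, using a hypothetical element type:

package main

import "fmt"

// connProp is a hypothetical stand-in for a struct slice element that holds a pointer.
type connProp struct {
	Value *string
}

// deepCopyInto gives the copy its own allocation for Value.
func (in *connProp) deepCopyInto(out *connProp) {
	*out = *in
	if in.Value != nil {
		out.Value = new(string)
		*out.Value = *in.Value
	}
}

func main() {
	v := "a"
	src := []connProp{{Value: &v}}

	// Shallow: copy() duplicates the elements, but their Value pointers
	// still alias the source slice.
	shallow := make([]connProp, len(src))
	copy(shallow, src)

	// Deep: element-by-element DeepCopyInto, as in the generated code.
	deep := make([]connProp, len(src))
	for i := range src {
		src[i].deepCopyInto(&deep[i])
	}

	*src[0].Value = "b"
	fmt.Println(*shallow[0].Value, *deep[0].Value) // prints: b a
}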
-func (in *JobConfigurationQuery) DeepCopyInto(out *JobConfigurationQuery) { - *out = *in - if in.Query != nil { - in, out := &in.Query, &out.Query - *out = new(string) - **out = **in - } - if in.DestinationTable != nil { - in, out := &in.DestinationTable, &out.DestinationTable - *out = new(TableReference) - (*in).DeepCopyInto(*out) - } - if in.UserDefinedFunctionResources != nil { - in, out := &in.UserDefinedFunctionResources, &out.UserDefinedFunctionResources - *out = make([]UserDefinedFunctionResource, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.CreateDisposition != nil { - in, out := &in.CreateDisposition, &out.CreateDisposition - *out = new(string) - **out = **in - } - if in.WriteDisposition != nil { - in, out := &in.WriteDisposition, &out.WriteDisposition - *out = new(string) - **out = **in - } - if in.DefaultDataset != nil { - in, out := &in.DefaultDataset, &out.DefaultDataset - *out = new(DatasetReference) - (*in).DeepCopyInto(*out) - } - if in.Priority != nil { - in, out := &in.Priority, &out.Priority - *out = new(string) - **out = **in - } - if in.AllowLargeResults != nil { - in, out := &in.AllowLargeResults, &out.AllowLargeResults - *out = new(bool) - **out = **in - } - if in.UseQueryCache != nil { - in, out := &in.UseQueryCache, &out.UseQueryCache - *out = new(bool) - **out = **in - } - if in.FlattenResults != nil { - in, out := &in.FlattenResults, &out.FlattenResults - *out = new(bool) - **out = **in - } - if in.MaximumBytesBilled != nil { - in, out := &in.MaximumBytesBilled, &out.MaximumBytesBilled - *out = new(int64) - **out = **in - } - if in.UseLegacySql != nil { - in, out := &in.UseLegacySql, &out.UseLegacySql - *out = new(bool) - **out = **in - } - if in.ParameterMode != nil { - in, out := &in.ParameterMode, &out.ParameterMode - *out = new(string) - **out = **in - } - if in.QueryParameters != nil { - in, out := &in.QueryParameters, &out.QueryParameters - *out = make([]QueryParameter, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.SystemVariables != nil { - in, out := &in.SystemVariables, &out.SystemVariables - *out = new(SystemVariables) - (*in).DeepCopyInto(*out) - } - if in.SchemaUpdateOptions != nil { - in, out := &in.SchemaUpdateOptions, &out.SchemaUpdateOptions - *out = make([]string, len(*in)) - copy(*out, *in) - } - if in.TimePartitioning != nil { - in, out := &in.TimePartitioning, &out.TimePartitioning - *out = new(TimePartitioning) - (*in).DeepCopyInto(*out) - } - if in.RangePartitioning != nil { - in, out := &in.RangePartitioning, &out.RangePartitioning - *out = new(RangePartitioning) - (*in).DeepCopyInto(*out) - } - if in.Clustering != nil { - in, out := &in.Clustering, &out.Clustering - *out = new(Clustering) - (*in).DeepCopyInto(*out) - } - if in.DestinationEncryptionConfiguration != nil { - in, out := &in.DestinationEncryptionConfiguration, &out.DestinationEncryptionConfiguration - *out = new(EncryptionConfiguration) - (*in).DeepCopyInto(*out) - } - if in.ScriptOptions != nil { - in, out := &in.ScriptOptions, &out.ScriptOptions - *out = new(ScriptOptions) - (*in).DeepCopyInto(*out) - } - if in.ConnectionProperties != nil { - in, out := &in.ConnectionProperties, &out.ConnectionProperties - *out = make([]ConnectionProperty, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.CreateSession != nil { - in, out := &in.CreateSession, &out.CreateSession - *out = new(bool) - **out = **in - } - if in.Continuous != nil { - in, out := &in.Continuous, 
&out.Continuous - *out = new(bool) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobConfigurationQuery. -func (in *JobConfigurationQuery) DeepCopy() *JobConfigurationQuery { - if in == nil { - return nil - } - out := new(JobConfigurationQuery) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *JobConfigurationTableCopy) DeepCopyInto(out *JobConfigurationTableCopy) { - *out = *in - if in.SourceTable != nil { - in, out := &in.SourceTable, &out.SourceTable - *out = new(TableReference) - (*in).DeepCopyInto(*out) - } - if in.SourceTables != nil { - in, out := &in.SourceTables, &out.SourceTables - *out = make([]TableReference, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.DestinationTable != nil { - in, out := &in.DestinationTable, &out.DestinationTable - *out = new(TableReference) - (*in).DeepCopyInto(*out) - } - if in.CreateDisposition != nil { - in, out := &in.CreateDisposition, &out.CreateDisposition - *out = new(string) - **out = **in - } - if in.WriteDisposition != nil { - in, out := &in.WriteDisposition, &out.WriteDisposition - *out = new(string) - **out = **in - } - if in.DestinationEncryptionConfiguration != nil { - in, out := &in.DestinationEncryptionConfiguration, &out.DestinationEncryptionConfiguration - *out = new(EncryptionConfiguration) - (*in).DeepCopyInto(*out) - } - if in.OperationType != nil { - in, out := &in.OperationType, &out.OperationType - *out = new(string) - **out = **in - } - if in.DestinationExpirationTime != nil { - in, out := &in.DestinationExpirationTime, &out.DestinationExpirationTime - *out = new(string) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobConfigurationTableCopy. -func (in *JobConfigurationTableCopy) DeepCopy() *JobConfigurationTableCopy { - if in == nil { - return nil - } - out := new(JobConfigurationTableCopy) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *JobCreationReason) DeepCopyInto(out *JobCreationReason) { - *out = *in - if in.Code != nil { - in, out := &in.Code, &out.Code - *out = new(string) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobCreationReason. -func (in *JobCreationReason) DeepCopy() *JobCreationReason { - if in == nil { - return nil - } - out := new(JobCreationReason) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *JobList) DeepCopyInto(out *JobList) { - *out = *in - if in.Etag != nil { - in, out := &in.Etag, &out.Etag - *out = new(string) - **out = **in - } - if in.Kind != nil { - in, out := &in.Kind, &out.Kind - *out = new(string) - **out = **in - } - if in.NextPageToken != nil { - in, out := &in.NextPageToken, &out.NextPageToken - *out = new(string) - **out = **in - } - if in.Jobs != nil { - in, out := &in.Jobs, &out.Jobs - *out = make([]ListFormatJob, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.Unreachable != nil { - in, out := &in.Unreachable, &out.Unreachable - *out = make([]string, len(*in)) - copy(*out, *in) - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobList. -func (in *JobList) DeepCopy() *JobList { - if in == nil { - return nil - } - out := new(JobList) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *JobReference) DeepCopyInto(out *JobReference) { - *out = *in - if in.ProjectID != nil { - in, out := &in.ProjectID, &out.ProjectID - *out = new(string) - **out = **in - } - if in.JobID != nil { - in, out := &in.JobID, &out.JobID - *out = new(string) - **out = **in - } - if in.Location != nil { - in, out := &in.Location, &out.Location - *out = new(string) - **out = **in - } - if in.LocationAlternative != nil { - in, out := &in.LocationAlternative, &out.LocationAlternative - *out = make([]string, len(*in)) - copy(*out, *in) - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobReference. -func (in *JobReference) DeepCopy() *JobReference { - if in == nil { - return nil - } - out := new(JobReference) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *JobStatistics) DeepCopyInto(out *JobStatistics) { - *out = *in - if in.CreationTime != nil { - in, out := &in.CreationTime, &out.CreationTime - *out = new(int64) - **out = **in - } - if in.StartTime != nil { - in, out := &in.StartTime, &out.StartTime - *out = new(int64) - **out = **in - } - if in.EndTime != nil { - in, out := &in.EndTime, &out.EndTime - *out = new(int64) - **out = **in - } - if in.TotalBytesProcessed != nil { - in, out := &in.TotalBytesProcessed, &out.TotalBytesProcessed - *out = new(int64) - **out = **in - } - if in.CompletionRatio != nil { - in, out := &in.CompletionRatio, &out.CompletionRatio - *out = new(float64) - **out = **in - } - if in.QuotaDeferments != nil { - in, out := &in.QuotaDeferments, &out.QuotaDeferments - *out = make([]string, len(*in)) - copy(*out, *in) - } - if in.Query != nil { - in, out := &in.Query, &out.Query - *out = new(JobStatistics2) - (*in).DeepCopyInto(*out) - } - if in.Load != nil { - in, out := &in.Load, &out.Load - *out = new(JobStatistics3) - (*in).DeepCopyInto(*out) - } - if in.Extract != nil { - in, out := &in.Extract, &out.Extract - *out = new(JobStatistics4) - (*in).DeepCopyInto(*out) - } - if in.Copy != nil { - in, out := &in.Copy, &out.Copy - *out = new(CopyJobStatistics) - (*in).DeepCopyInto(*out) - } - if in.TotalSlotMs != nil { - in, out := &in.TotalSlotMs, &out.TotalSlotMs - *out = new(int64) - **out = **in - } - if in.ReservationID != nil { - in, out := &in.ReservationID, &out.ReservationID - *out = new(string) - **out = **in - } - if in.NumChildJobs != nil { - in, out := &in.NumChildJobs, &out.NumChildJobs - *out = new(int64) - **out = **in - } - if in.ParentJobID != nil { - in, out := &in.ParentJobID, &out.ParentJobID - *out = new(string) - **out = **in - } - if in.ScriptStatistics != nil { - in, out := &in.ScriptStatistics, &out.ScriptStatistics - *out = new(ScriptStatistics) - (*in).DeepCopyInto(*out) - } - if in.RowLevelSecurityStatistics != nil { - in, out := &in.RowLevelSecurityStatistics, &out.RowLevelSecurityStatistics - *out = new(RowLevelSecurityStatistics) - (*in).DeepCopyInto(*out) - } - if in.DataMaskingStatistics != nil { - in, out := &in.DataMaskingStatistics, &out.DataMaskingStatistics - *out = new(DataMaskingStatistics) - (*in).DeepCopyInto(*out) - } - if in.TransactionInfo != nil { - in, out := &in.TransactionInfo, &out.TransactionInfo - *out = new(JobStatistics_TransactionInfo) - (*in).DeepCopyInto(*out) - } - if in.SessionInfo != nil { - in, out := &in.SessionInfo, &out.SessionInfo - *out = new(SessionInfo) - (*in).DeepCopyInto(*out) - } - if in.FinalExecutionDurationMs != nil { - in, out := &in.FinalExecutionDurationMs, &out.FinalExecutionDurationMs - *out = new(int64) - **out = **in - } - if in.Edition != nil { - in, out := &in.Edition, &out.Edition - *out = new(string) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobStatistics. -func (in *JobStatistics) DeepCopy() *JobStatistics { - if in == nil { - return nil - } - out := new(JobStatistics) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *JobStatistics2) DeepCopyInto(out *JobStatistics2) { - *out = *in - if in.QueryPlan != nil { - in, out := &in.QueryPlan, &out.QueryPlan - *out = make([]ExplainQueryStage, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.EstimatedBytesProcessed != nil { - in, out := &in.EstimatedBytesProcessed, &out.EstimatedBytesProcessed - *out = new(int64) - **out = **in - } - if in.Timeline != nil { - in, out := &in.Timeline, &out.Timeline - *out = make([]QueryTimelineSample, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.TotalPartitionsProcessed != nil { - in, out := &in.TotalPartitionsProcessed, &out.TotalPartitionsProcessed - *out = new(int64) - **out = **in - } - if in.TotalBytesProcessed != nil { - in, out := &in.TotalBytesProcessed, &out.TotalBytesProcessed - *out = new(int64) - **out = **in - } - if in.TotalBytesProcessedAccuracy != nil { - in, out := &in.TotalBytesProcessedAccuracy, &out.TotalBytesProcessedAccuracy - *out = new(string) - **out = **in - } - if in.TotalBytesBilled != nil { - in, out := &in.TotalBytesBilled, &out.TotalBytesBilled - *out = new(int64) - **out = **in - } - if in.BillingTier != nil { - in, out := &in.BillingTier, &out.BillingTier - *out = new(int32) - **out = **in - } - if in.TotalSlotMs != nil { - in, out := &in.TotalSlotMs, &out.TotalSlotMs - *out = new(int64) - **out = **in - } - if in.CacheHit != nil { - in, out := &in.CacheHit, &out.CacheHit - *out = new(bool) - **out = **in - } - if in.ReferencedTables != nil { - in, out := &in.ReferencedTables, &out.ReferencedTables - *out = make([]TableReference, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.ReferencedRoutines != nil { - in, out := &in.ReferencedRoutines, &out.ReferencedRoutines - *out = make([]RoutineReference, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.Schema != nil { - in, out := &in.Schema, &out.Schema - *out = new(TableSchema) - (*in).DeepCopyInto(*out) - } - if in.NumDmlAffectedRows != nil { - in, out := &in.NumDmlAffectedRows, &out.NumDmlAffectedRows - *out = new(int64) - **out = **in - } - if in.DmlStats != nil { - in, out := &in.DmlStats, &out.DmlStats - *out = new(DmlStats) - (*in).DeepCopyInto(*out) - } - if in.UndeclaredQueryParameters != nil { - in, out := &in.UndeclaredQueryParameters, &out.UndeclaredQueryParameters - *out = make([]QueryParameter, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.StatementType != nil { - in, out := &in.StatementType, &out.StatementType - *out = new(string) - **out = **in - } - if in.DdlOperationPerformed != nil { - in, out := &in.DdlOperationPerformed, &out.DdlOperationPerformed - *out = new(string) - **out = **in - } - if in.DdlTargetTable != nil { - in, out := &in.DdlTargetTable, &out.DdlTargetTable - *out = new(TableReference) - (*in).DeepCopyInto(*out) - } - if in.DdlDestinationTable != nil { - in, out := &in.DdlDestinationTable, &out.DdlDestinationTable - *out = new(TableReference) - (*in).DeepCopyInto(*out) - } - if in.DdlTargetRowAccessPolicy != nil { - in, out := &in.DdlTargetRowAccessPolicy, &out.DdlTargetRowAccessPolicy - *out = new(RowAccessPolicyReference) - (*in).DeepCopyInto(*out) - } - if in.DdlAffectedRowAccessPolicyCount != nil { - in, out := &in.DdlAffectedRowAccessPolicyCount, &out.DdlAffectedRowAccessPolicyCount - *out = new(int64) - **out = **in - } - if in.DdlTargetRoutine != nil { - in, out := &in.DdlTargetRoutine, &out.DdlTargetRoutine - *out = 
new(RoutineReference) - (*in).DeepCopyInto(*out) - } - if in.DdlTargetDataset != nil { - in, out := &in.DdlTargetDataset, &out.DdlTargetDataset - *out = new(DatasetReference) - (*in).DeepCopyInto(*out) - } - if in.MlStatistics != nil { - in, out := &in.MlStatistics, &out.MlStatistics - *out = new(MlStatistics) - (*in).DeepCopyInto(*out) - } - if in.ExportDataStatistics != nil { - in, out := &in.ExportDataStatistics, &out.ExportDataStatistics - *out = new(ExportDataStatistics) - (*in).DeepCopyInto(*out) - } - if in.ExternalServiceCosts != nil { - in, out := &in.ExternalServiceCosts, &out.ExternalServiceCosts - *out = make([]ExternalServiceCost, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.BiEngineStatistics != nil { - in, out := &in.BiEngineStatistics, &out.BiEngineStatistics - *out = new(BiEngineStatistics) - (*in).DeepCopyInto(*out) - } - if in.LoadQueryStatistics != nil { - in, out := &in.LoadQueryStatistics, &out.LoadQueryStatistics - *out = new(LoadQueryStatistics) - (*in).DeepCopyInto(*out) - } - if in.DclTargetTable != nil { - in, out := &in.DclTargetTable, &out.DclTargetTable - *out = new(TableReference) - (*in).DeepCopyInto(*out) - } - if in.DclTargetView != nil { - in, out := &in.DclTargetView, &out.DclTargetView - *out = new(TableReference) - (*in).DeepCopyInto(*out) - } - if in.DclTargetDataset != nil { - in, out := &in.DclTargetDataset, &out.DclTargetDataset - *out = new(DatasetReference) - (*in).DeepCopyInto(*out) - } - if in.SearchStatistics != nil { - in, out := &in.SearchStatistics, &out.SearchStatistics - *out = new(SearchStatistics) - (*in).DeepCopyInto(*out) - } - if in.VectorSearchStatistics != nil { - in, out := &in.VectorSearchStatistics, &out.VectorSearchStatistics - *out = new(VectorSearchStatistics) - (*in).DeepCopyInto(*out) - } - if in.PerformanceInsights != nil { - in, out := &in.PerformanceInsights, &out.PerformanceInsights - *out = new(PerformanceInsights) - (*in).DeepCopyInto(*out) - } - if in.QueryInfo != nil { - in, out := &in.QueryInfo, &out.QueryInfo - *out = new(QueryInfo) - (*in).DeepCopyInto(*out) - } - if in.SparkStatistics != nil { - in, out := &in.SparkStatistics, &out.SparkStatistics - *out = new(SparkStatistics) - (*in).DeepCopyInto(*out) - } - if in.TransferredBytes != nil { - in, out := &in.TransferredBytes, &out.TransferredBytes - *out = new(int64) - **out = **in - } - if in.MaterializedViewStatistics != nil { - in, out := &in.MaterializedViewStatistics, &out.MaterializedViewStatistics - *out = new(MaterializedViewStatistics) - (*in).DeepCopyInto(*out) - } - if in.MetadataCacheStatistics != nil { - in, out := &in.MetadataCacheStatistics, &out.MetadataCacheStatistics - *out = new(MetadataCacheStatistics) - (*in).DeepCopyInto(*out) - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobStatistics2. -func (in *JobStatistics2) DeepCopy() *JobStatistics2 { - if in == nil { - return nil - } - out := new(JobStatistics2) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *JobStatistics3) DeepCopyInto(out *JobStatistics3) { - *out = *in - if in.InputFiles != nil { - in, out := &in.InputFiles, &out.InputFiles - *out = new(int64) - **out = **in - } - if in.InputFileBytes != nil { - in, out := &in.InputFileBytes, &out.InputFileBytes - *out = new(int64) - **out = **in - } - if in.OutputRows != nil { - in, out := &in.OutputRows, &out.OutputRows - *out = new(int64) - **out = **in - } - if in.OutputBytes != nil { - in, out := &in.OutputBytes, &out.OutputBytes - *out = new(int64) - **out = **in - } - if in.BadRecords != nil { - in, out := &in.BadRecords, &out.BadRecords - *out = new(int64) - **out = **in - } - if in.Timeline != nil { - in, out := &in.Timeline, &out.Timeline - *out = make([]QueryTimelineSample, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobStatistics3. -func (in *JobStatistics3) DeepCopy() *JobStatistics3 { - if in == nil { - return nil - } - out := new(JobStatistics3) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *JobStatistics4) DeepCopyInto(out *JobStatistics4) { - *out = *in - if in.DestinationUriFileCounts != nil { - in, out := &in.DestinationUriFileCounts, &out.DestinationUriFileCounts - *out = make([]int64, len(*in)) - copy(*out, *in) - } - if in.InputBytes != nil { - in, out := &in.InputBytes, &out.InputBytes - *out = new(int64) - **out = **in - } - if in.Timeline != nil { - in, out := &in.Timeline, &out.Timeline - *out = make([]QueryTimelineSample, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobStatistics4. -func (in *JobStatistics4) DeepCopy() *JobStatistics4 { - if in == nil { - return nil - } - out := new(JobStatistics4) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *JobStatistics_TransactionInfo) DeepCopyInto(out *JobStatistics_TransactionInfo) { - *out = *in - if in.TransactionID != nil { - in, out := &in.TransactionID, &out.TransactionID - *out = new(string) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobStatistics_TransactionInfo. -func (in *JobStatistics_TransactionInfo) DeepCopy() *JobStatistics_TransactionInfo { - if in == nil { - return nil - } - out := new(JobStatistics_TransactionInfo) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *JobStatus) DeepCopyInto(out *JobStatus) { - *out = *in - if in.ErrorResult != nil { - in, out := &in.ErrorResult, &out.ErrorResult - *out = new(ErrorProto) - (*in).DeepCopyInto(*out) - } - if in.Errors != nil { - in, out := &in.Errors, &out.Errors - *out = make([]ErrorProto, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.State != nil { - in, out := &in.State, &out.State - *out = new(string) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JobStatus. 
-func (in *JobStatus) DeepCopy() *JobStatus { - if in == nil { - return nil - } - out := new(JobStatus) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *JoinRestrictionPolicy) DeepCopyInto(out *JoinRestrictionPolicy) { - *out = *in - if in.JoinCondition != nil { - in, out := &in.JoinCondition, &out.JoinCondition - *out = new(string) - **out = **in - } - if in.JoinAllowedColumns != nil { - in, out := &in.JoinAllowedColumns, &out.JoinAllowedColumns - *out = make([]string, len(*in)) - copy(*out, *in) - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JoinRestrictionPolicy. -func (in *JoinRestrictionPolicy) DeepCopy() *JoinRestrictionPolicy { - if in == nil { - return nil - } - out := new(JoinRestrictionPolicy) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *JsonOptions) DeepCopyInto(out *JsonOptions) { - *out = *in - if in.Encoding != nil { - in, out := &in.Encoding, &out.Encoding - *out = new(string) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new JsonOptions. -func (in *JsonOptions) DeepCopy() *JsonOptions { - if in == nil { - return nil - } - out := new(JsonOptions) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *LinkedDatasetMetadata) DeepCopyInto(out *LinkedDatasetMetadata) { - *out = *in -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LinkedDatasetMetadata. -func (in *LinkedDatasetMetadata) DeepCopy() *LinkedDatasetMetadata { - if in == nil { - return nil - } - out := new(LinkedDatasetMetadata) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *LinkedDatasetSource) DeepCopyInto(out *LinkedDatasetSource) { - *out = *in - if in.SourceDataset != nil { - in, out := &in.SourceDataset, &out.SourceDataset - *out = new(DatasetReference) - (*in).DeepCopyInto(*out) - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LinkedDatasetSource. -func (in *LinkedDatasetSource) DeepCopy() *LinkedDatasetSource { - if in == nil { - return nil - } - out := new(LinkedDatasetSource) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *ListFormatDataset) DeepCopyInto(out *ListFormatDataset) { - *out = *in - if in.Kind != nil { - in, out := &in.Kind, &out.Kind - *out = new(string) - **out = **in - } - if in.ID != nil { - in, out := &in.ID, &out.ID - *out = new(string) - **out = **in - } - if in.DatasetReference != nil { - in, out := &in.DatasetReference, &out.DatasetReference - *out = new(DatasetReference) - (*in).DeepCopyInto(*out) - } - if in.Labels != nil { - in, out := &in.Labels, &out.Labels - *out = make(map[string]string, len(*in)) - for key, val := range *in { - (*out)[key] = val - } - } - if in.FriendlyName != nil { - in, out := &in.FriendlyName, &out.FriendlyName - *out = new(string) - **out = **in - } - if in.Location != nil { - in, out := &in.Location, &out.Location - *out = new(string) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ListFormatDataset. -func (in *ListFormatDataset) DeepCopy() *ListFormatDataset { - if in == nil { - return nil - } - out := new(ListFormatDataset) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *ListFormatJob) DeepCopyInto(out *ListFormatJob) { - *out = *in - if in.ID != nil { - in, out := &in.ID, &out.ID - *out = new(string) - **out = **in - } - if in.Kind != nil { - in, out := &in.Kind, &out.Kind - *out = new(string) - **out = **in - } - if in.JobReference != nil { - in, out := &in.JobReference, &out.JobReference - *out = new(JobReference) - (*in).DeepCopyInto(*out) - } - if in.State != nil { - in, out := &in.State, &out.State - *out = new(string) - **out = **in - } - if in.ErrorResult != nil { - in, out := &in.ErrorResult, &out.ErrorResult - *out = new(ErrorProto) - (*in).DeepCopyInto(*out) - } - if in.Statistics != nil { - in, out := &in.Statistics, &out.Statistics - *out = new(JobStatistics) - (*in).DeepCopyInto(*out) - } - if in.Configuration != nil { - in, out := &in.Configuration, &out.Configuration - *out = new(JobConfiguration) - (*in).DeepCopyInto(*out) - } - if in.Status != nil { - in, out := &in.Status, &out.Status - *out = new(JobStatus) - (*in).DeepCopyInto(*out) - } - if in.UserEmail != nil { - in, out := &in.UserEmail, &out.UserEmail - *out = new(string) - **out = **in - } - if in.PrincipalSubject != nil { - in, out := &in.PrincipalSubject, &out.PrincipalSubject - *out = new(string) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ListFormatJob. -func (in *ListFormatJob) DeepCopy() *ListFormatJob { - if in == nil { - return nil - } - out := new(ListFormatJob) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *ListFormatTable) DeepCopyInto(out *ListFormatTable) { - *out = *in - if in.Kind != nil { - in, out := &in.Kind, &out.Kind - *out = new(string) - **out = **in - } - if in.ID != nil { - in, out := &in.ID, &out.ID - *out = new(string) - **out = **in - } - if in.TableReference != nil { - in, out := &in.TableReference, &out.TableReference - *out = new(TableReference) - (*in).DeepCopyInto(*out) - } - if in.FriendlyName != nil { - in, out := &in.FriendlyName, &out.FriendlyName - *out = new(string) - **out = **in - } - if in.Type != nil { - in, out := &in.Type, &out.Type - *out = new(string) - **out = **in - } - if in.TimePartitioning != nil { - in, out := &in.TimePartitioning, &out.TimePartitioning - *out = new(TimePartitioning) - (*in).DeepCopyInto(*out) - } - if in.RangePartitioning != nil { - in, out := &in.RangePartitioning, &out.RangePartitioning - *out = new(RangePartitioning) - (*in).DeepCopyInto(*out) - } - if in.Clustering != nil { - in, out := &in.Clustering, &out.Clustering - *out = new(Clustering) - (*in).DeepCopyInto(*out) - } - if in.Labels != nil { - in, out := &in.Labels, &out.Labels - *out = make(map[string]string, len(*in)) - for key, val := range *in { - (*out)[key] = val - } - } - if in.View != nil { - in, out := &in.View, &out.View - *out = new(ListFormatView) - (*in).DeepCopyInto(*out) - } - if in.CreationTime != nil { - in, out := &in.CreationTime, &out.CreationTime - *out = new(int64) - **out = **in - } - if in.ExpirationTime != nil { - in, out := &in.ExpirationTime, &out.ExpirationTime - *out = new(int64) - **out = **in - } - if in.RequirePartitionFilter != nil { - in, out := &in.RequirePartitionFilter, &out.RequirePartitionFilter - *out = new(bool) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ListFormatTable. -func (in *ListFormatTable) DeepCopy() *ListFormatTable { - if in == nil { - return nil - } - out := new(ListFormatTable) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *ListFormatView) DeepCopyInto(out *ListFormatView) { - *out = *in - if in.UseLegacySql != nil { - in, out := &in.UseLegacySql, &out.UseLegacySql - *out = new(bool) - **out = **in - } - if in.PrivacyPolicy != nil { - in, out := &in.PrivacyPolicy, &out.PrivacyPolicy - *out = new(PrivacyPolicy) - (*in).DeepCopyInto(*out) - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ListFormatView. -func (in *ListFormatView) DeepCopy() *ListFormatView { - if in == nil { - return nil - } - out := new(ListFormatView) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *LoadQueryStatistics) DeepCopyInto(out *LoadQueryStatistics) { - *out = *in - if in.InputFiles != nil { - in, out := &in.InputFiles, &out.InputFiles - *out = new(int64) - **out = **in - } - if in.InputFileBytes != nil { - in, out := &in.InputFileBytes, &out.InputFileBytes - *out = new(int64) - **out = **in - } - if in.OutputRows != nil { - in, out := &in.OutputRows, &out.OutputRows - *out = new(int64) - **out = **in - } - if in.OutputBytes != nil { - in, out := &in.OutputBytes, &out.OutputBytes - *out = new(int64) - **out = **in - } - if in.BadRecords != nil { - in, out := &in.BadRecords, &out.BadRecords - *out = new(int64) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new LoadQueryStatistics. -func (in *LoadQueryStatistics) DeepCopy() *LoadQueryStatistics { - if in == nil { - return nil - } - out := new(LoadQueryStatistics) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *MaterializedView) DeepCopyInto(out *MaterializedView) { - *out = *in - if in.TableReference != nil { - in, out := &in.TableReference, &out.TableReference - *out = new(TableReference) - (*in).DeepCopyInto(*out) - } - if in.Chosen != nil { - in, out := &in.Chosen, &out.Chosen - *out = new(bool) - **out = **in - } - if in.EstimatedBytesSaved != nil { - in, out := &in.EstimatedBytesSaved, &out.EstimatedBytesSaved - *out = new(int64) - **out = **in - } - if in.RejectedReason != nil { - in, out := &in.RejectedReason, &out.RejectedReason - *out = new(string) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new MaterializedView. -func (in *MaterializedView) DeepCopy() *MaterializedView { - if in == nil { - return nil - } - out := new(MaterializedView) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *MaterializedViewDefinition) DeepCopyInto(out *MaterializedViewDefinition) { - *out = *in - if in.Query != nil { - in, out := &in.Query, &out.Query - *out = new(string) - **out = **in - } - if in.LastRefreshTime != nil { - in, out := &in.LastRefreshTime, &out.LastRefreshTime - *out = new(int64) - **out = **in - } - if in.EnableRefresh != nil { - in, out := &in.EnableRefresh, &out.EnableRefresh - *out = new(bool) - **out = **in - } - if in.RefreshIntervalMs != nil { - in, out := &in.RefreshIntervalMs, &out.RefreshIntervalMs - *out = new(uint64) - **out = **in - } - if in.AllowNonIncrementalDefinition != nil { - in, out := &in.AllowNonIncrementalDefinition, &out.AllowNonIncrementalDefinition - *out = new(bool) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new MaterializedViewDefinition. -func (in *MaterializedViewDefinition) DeepCopy() *MaterializedViewDefinition { - if in == nil { - return nil - } - out := new(MaterializedViewDefinition) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *MaterializedViewStatistics) DeepCopyInto(out *MaterializedViewStatistics) { - *out = *in - if in.MaterializedView != nil { - in, out := &in.MaterializedView, &out.MaterializedView - *out = make([]MaterializedView, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new MaterializedViewStatistics. -func (in *MaterializedViewStatistics) DeepCopy() *MaterializedViewStatistics { - if in == nil { - return nil - } - out := new(MaterializedViewStatistics) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *MaterializedViewStatus) DeepCopyInto(out *MaterializedViewStatus) { - *out = *in - if in.RefreshWatermark != nil { - in, out := &in.RefreshWatermark, &out.RefreshWatermark - *out = new(string) - **out = **in - } - if in.LastRefreshStatus != nil { - in, out := &in.LastRefreshStatus, &out.LastRefreshStatus - *out = new(ErrorProto) - (*in).DeepCopyInto(*out) - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new MaterializedViewStatus. -func (in *MaterializedViewStatus) DeepCopy() *MaterializedViewStatus { - if in == nil { - return nil - } - out := new(MaterializedViewStatus) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *MetadataCacheStatistics) DeepCopyInto(out *MetadataCacheStatistics) { - *out = *in - if in.TableMetadataCacheUsage != nil { - in, out := &in.TableMetadataCacheUsage, &out.TableMetadataCacheUsage - *out = make([]TableMetadataCacheUsage, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new MetadataCacheStatistics. -func (in *MetadataCacheStatistics) DeepCopy() *MetadataCacheStatistics { - if in == nil { - return nil - } - out := new(MetadataCacheStatistics) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *MlStatistics) DeepCopyInto(out *MlStatistics) { - *out = *in - if in.MaxIterations != nil { - in, out := &in.MaxIterations, &out.MaxIterations - *out = new(int64) - **out = **in - } - if in.IterationResults != nil { - in, out := &in.IterationResults, &out.IterationResults - *out = make([]Model_TrainingRun_IterationResult, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.ModelType != nil { - in, out := &in.ModelType, &out.ModelType - *out = new(string) - **out = **in - } - if in.TrainingType != nil { - in, out := &in.TrainingType, &out.TrainingType - *out = new(string) - **out = **in - } - if in.HparamTrials != nil { - in, out := &in.HparamTrials, &out.HparamTrials - *out = make([]Model_HparamTuningTrial, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new MlStatistics. -func (in *MlStatistics) DeepCopy() *MlStatistics { - if in == nil { - return nil - } - out := new(MlStatistics) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *Model) DeepCopyInto(out *Model) { - *out = *in - if in.Etag != nil { - in, out := &in.Etag, &out.Etag - *out = new(string) - **out = **in - } - if in.ModelReference != nil { - in, out := &in.ModelReference, &out.ModelReference - *out = new(ModelReference) - (*in).DeepCopyInto(*out) - } - if in.CreationTime != nil { - in, out := &in.CreationTime, &out.CreationTime - *out = new(int64) - **out = **in - } - if in.LastModifiedTime != nil { - in, out := &in.LastModifiedTime, &out.LastModifiedTime - *out = new(int64) - **out = **in - } - if in.Description != nil { - in, out := &in.Description, &out.Description - *out = new(string) - **out = **in - } - if in.FriendlyName != nil { - in, out := &in.FriendlyName, &out.FriendlyName - *out = new(string) - **out = **in - } - if in.Labels != nil { - in, out := &in.Labels, &out.Labels - *out = make(map[string]string, len(*in)) - for key, val := range *in { - (*out)[key] = val - } - } - if in.ExpirationTime != nil { - in, out := &in.ExpirationTime, &out.ExpirationTime - *out = new(int64) - **out = **in - } - if in.Location != nil { - in, out := &in.Location, &out.Location - *out = new(string) - **out = **in - } - if in.EncryptionConfiguration != nil { - in, out := &in.EncryptionConfiguration, &out.EncryptionConfiguration - *out = new(EncryptionConfiguration) - (*in).DeepCopyInto(*out) - } - if in.ModelType != nil { - in, out := &in.ModelType, &out.ModelType - *out = new(string) - **out = **in - } - if in.TrainingRuns != nil { - in, out := &in.TrainingRuns, &out.TrainingRuns - *out = make([]Model_TrainingRun, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.FeatureColumns != nil { - in, out := &in.FeatureColumns, &out.FeatureColumns - *out = make([]StandardSqlField, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.LabelColumns != nil { - in, out := &in.LabelColumns, &out.LabelColumns - *out = make([]StandardSqlField, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.TransformColumns != nil { - in, out := &in.TransformColumns, &out.TransformColumns - *out = make([]TransformColumn, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.HparamSearchSpaces != nil { - in, out := &in.HparamSearchSpaces, &out.HparamSearchSpaces - *out = new(Model_HparamSearchSpaces) - (*in).DeepCopyInto(*out) - } - if in.DefaultTrialID != nil { - in, out := &in.DefaultTrialID, &out.DefaultTrialID - *out = new(int64) - **out = **in - } - if in.HparamTrials != nil { - in, out := &in.HparamTrials, &out.HparamTrials - *out = make([]Model_HparamTuningTrial, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.OptimalTrialIds != nil { - in, out := &in.OptimalTrialIds, &out.OptimalTrialIds - *out = make([]int64, len(*in)) - copy(*out, *in) - } - if in.RemoteModelInfo != nil { - in, out := &in.RemoteModelInfo, &out.RemoteModelInfo - *out = new(RemoteModelInfo) - (*in).DeepCopyInto(*out) - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Model. -func (in *Model) DeepCopy() *Model { - if in == nil { - return nil - } - out := new(Model) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *ModelReference) DeepCopyInto(out *ModelReference) { - *out = *in - if in.ProjectID != nil { - in, out := &in.ProjectID, &out.ProjectID - *out = new(string) - **out = **in - } - if in.DatasetID != nil { - in, out := &in.DatasetID, &out.DatasetID - *out = new(string) - **out = **in - } - if in.ModelID != nil { - in, out := &in.ModelID, &out.ModelID - *out = new(string) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ModelReference. -func (in *ModelReference) DeepCopy() *ModelReference { - if in == nil { - return nil - } - out := new(ModelReference) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Model_AggregateClassificationMetrics) DeepCopyInto(out *Model_AggregateClassificationMetrics) { - *out = *in - if in.Precision != nil { - in, out := &in.Precision, &out.Precision - *out = new(float64) - **out = **in - } - if in.Recall != nil { - in, out := &in.Recall, &out.Recall - *out = new(float64) - **out = **in - } - if in.Accuracy != nil { - in, out := &in.Accuracy, &out.Accuracy - *out = new(float64) - **out = **in - } - if in.Threshold != nil { - in, out := &in.Threshold, &out.Threshold - *out = new(float64) - **out = **in - } - if in.F1Score != nil { - in, out := &in.F1Score, &out.F1Score - *out = new(float64) - **out = **in - } - if in.LogLoss != nil { - in, out := &in.LogLoss, &out.LogLoss - *out = new(float64) - **out = **in - } - if in.RocAuc != nil { - in, out := &in.RocAuc, &out.RocAuc - *out = new(float64) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Model_AggregateClassificationMetrics. -func (in *Model_AggregateClassificationMetrics) DeepCopy() *Model_AggregateClassificationMetrics { - if in == nil { - return nil - } - out := new(Model_AggregateClassificationMetrics) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Model_ArimaFittingMetrics) DeepCopyInto(out *Model_ArimaFittingMetrics) { - *out = *in - if in.LogLikelihood != nil { - in, out := &in.LogLikelihood, &out.LogLikelihood - *out = new(float64) - **out = **in - } - if in.Aic != nil { - in, out := &in.Aic, &out.Aic - *out = new(float64) - **out = **in - } - if in.Variance != nil { - in, out := &in.Variance, &out.Variance - *out = new(float64) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Model_ArimaFittingMetrics. -func (in *Model_ArimaFittingMetrics) DeepCopy() *Model_ArimaFittingMetrics { - if in == nil { - return nil - } - out := new(Model_ArimaFittingMetrics) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Model_ArimaForecastingMetrics) DeepCopyInto(out *Model_ArimaForecastingMetrics) { - *out = *in - if in.ArimaSingleModelForecastingMetrics != nil { - in, out := &in.ArimaSingleModelForecastingMetrics, &out.ArimaSingleModelForecastingMetrics - *out = make([]Model_ArimaForecastingMetrics_ArimaSingleModelForecastingMetrics, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Model_ArimaForecastingMetrics. 
-func (in *Model_ArimaForecastingMetrics) DeepCopy() *Model_ArimaForecastingMetrics { - if in == nil { - return nil - } - out := new(Model_ArimaForecastingMetrics) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Model_ArimaForecastingMetrics_ArimaSingleModelForecastingMetrics) DeepCopyInto(out *Model_ArimaForecastingMetrics_ArimaSingleModelForecastingMetrics) { - *out = *in - if in.NonSeasonalOrder != nil { - in, out := &in.NonSeasonalOrder, &out.NonSeasonalOrder - *out = new(Model_ArimaOrder) - (*in).DeepCopyInto(*out) - } - if in.ArimaFittingMetrics != nil { - in, out := &in.ArimaFittingMetrics, &out.ArimaFittingMetrics - *out = new(Model_ArimaFittingMetrics) - (*in).DeepCopyInto(*out) - } - if in.HasDrift != nil { - in, out := &in.HasDrift, &out.HasDrift - *out = new(bool) - **out = **in - } - if in.TimeSeriesID != nil { - in, out := &in.TimeSeriesID, &out.TimeSeriesID - *out = new(string) - **out = **in - } - if in.TimeSeriesIds != nil { - in, out := &in.TimeSeriesIds, &out.TimeSeriesIds - *out = make([]string, len(*in)) - copy(*out, *in) - } - if in.SeasonalPeriods != nil { - in, out := &in.SeasonalPeriods, &out.SeasonalPeriods - *out = make([]string, len(*in)) - copy(*out, *in) - } - if in.HasHolidayEffect != nil { - in, out := &in.HasHolidayEffect, &out.HasHolidayEffect - *out = new(bool) - **out = **in - } - if in.HasSpikesAndDips != nil { - in, out := &in.HasSpikesAndDips, &out.HasSpikesAndDips - *out = new(bool) - **out = **in - } - if in.HasStepChanges != nil { - in, out := &in.HasStepChanges, &out.HasStepChanges - *out = new(bool) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Model_ArimaForecastingMetrics_ArimaSingleModelForecastingMetrics. -func (in *Model_ArimaForecastingMetrics_ArimaSingleModelForecastingMetrics) DeepCopy() *Model_ArimaForecastingMetrics_ArimaSingleModelForecastingMetrics { - if in == nil { - return nil - } - out := new(Model_ArimaForecastingMetrics_ArimaSingleModelForecastingMetrics) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Model_ArimaOrder) DeepCopyInto(out *Model_ArimaOrder) { - *out = *in - if in.P != nil { - in, out := &in.P, &out.P - *out = new(int64) - **out = **in - } - if in.D != nil { - in, out := &in.D, &out.D - *out = new(int64) - **out = **in - } - if in.Q != nil { - in, out := &in.Q, &out.Q - *out = new(int64) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Model_ArimaOrder. -func (in *Model_ArimaOrder) DeepCopy() *Model_ArimaOrder { - if in == nil { - return nil - } - out := new(Model_ArimaOrder) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *Model_BinaryClassificationMetrics) DeepCopyInto(out *Model_BinaryClassificationMetrics) { - *out = *in - if in.AggregateClassificationMetrics != nil { - in, out := &in.AggregateClassificationMetrics, &out.AggregateClassificationMetrics - *out = new(Model_AggregateClassificationMetrics) - (*in).DeepCopyInto(*out) - } - if in.BinaryConfusionMatrixList != nil { - in, out := &in.BinaryConfusionMatrixList, &out.BinaryConfusionMatrixList - *out = make([]Model_BinaryClassificationMetrics_BinaryConfusionMatrix, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.PositiveLabel != nil { - in, out := &in.PositiveLabel, &out.PositiveLabel - *out = new(string) - **out = **in - } - if in.NegativeLabel != nil { - in, out := &in.NegativeLabel, &out.NegativeLabel - *out = new(string) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Model_BinaryClassificationMetrics. -func (in *Model_BinaryClassificationMetrics) DeepCopy() *Model_BinaryClassificationMetrics { - if in == nil { - return nil - } - out := new(Model_BinaryClassificationMetrics) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Model_BinaryClassificationMetrics_BinaryConfusionMatrix) DeepCopyInto(out *Model_BinaryClassificationMetrics_BinaryConfusionMatrix) { - *out = *in - if in.PositiveClassThreshold != nil { - in, out := &in.PositiveClassThreshold, &out.PositiveClassThreshold - *out = new(float64) - **out = **in - } - if in.TruePositives != nil { - in, out := &in.TruePositives, &out.TruePositives - *out = new(int64) - **out = **in - } - if in.FalsePositives != nil { - in, out := &in.FalsePositives, &out.FalsePositives - *out = new(int64) - **out = **in - } - if in.TrueNegatives != nil { - in, out := &in.TrueNegatives, &out.TrueNegatives - *out = new(int64) - **out = **in - } - if in.FalseNegatives != nil { - in, out := &in.FalseNegatives, &out.FalseNegatives - *out = new(int64) - **out = **in - } - if in.Precision != nil { - in, out := &in.Precision, &out.Precision - *out = new(float64) - **out = **in - } - if in.Recall != nil { - in, out := &in.Recall, &out.Recall - *out = new(float64) - **out = **in - } - if in.F1Score != nil { - in, out := &in.F1Score, &out.F1Score - *out = new(float64) - **out = **in - } - if in.Accuracy != nil { - in, out := &in.Accuracy, &out.Accuracy - *out = new(float64) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Model_BinaryClassificationMetrics_BinaryConfusionMatrix. -func (in *Model_BinaryClassificationMetrics_BinaryConfusionMatrix) DeepCopy() *Model_BinaryClassificationMetrics_BinaryConfusionMatrix { - if in == nil { - return nil - } - out := new(Model_BinaryClassificationMetrics_BinaryConfusionMatrix) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Model_BoostedTreeOptionEnums) DeepCopyInto(out *Model_BoostedTreeOptionEnums) { - *out = *in -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Model_BoostedTreeOptionEnums. 
-func (in *Model_BoostedTreeOptionEnums) DeepCopy() *Model_BoostedTreeOptionEnums { - if in == nil { - return nil - } - out := new(Model_BoostedTreeOptionEnums) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Model_CategoryEncodingMethod) DeepCopyInto(out *Model_CategoryEncodingMethod) { - *out = *in -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Model_CategoryEncodingMethod. -func (in *Model_CategoryEncodingMethod) DeepCopy() *Model_CategoryEncodingMethod { - if in == nil { - return nil - } - out := new(Model_CategoryEncodingMethod) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Model_ClusteringMetrics) DeepCopyInto(out *Model_ClusteringMetrics) { - *out = *in - if in.DaviesBouldinIndex != nil { - in, out := &in.DaviesBouldinIndex, &out.DaviesBouldinIndex - *out = new(float64) - **out = **in - } - if in.MeanSquaredDistance != nil { - in, out := &in.MeanSquaredDistance, &out.MeanSquaredDistance - *out = new(float64) - **out = **in - } - if in.Clusters != nil { - in, out := &in.Clusters, &out.Clusters - *out = make([]Model_ClusteringMetrics_Cluster, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Model_ClusteringMetrics. -func (in *Model_ClusteringMetrics) DeepCopy() *Model_ClusteringMetrics { - if in == nil { - return nil - } - out := new(Model_ClusteringMetrics) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Model_ClusteringMetrics_Cluster) DeepCopyInto(out *Model_ClusteringMetrics_Cluster) { - *out = *in - if in.CentroidID != nil { - in, out := &in.CentroidID, &out.CentroidID - *out = new(int64) - **out = **in - } - if in.FeatureValues != nil { - in, out := &in.FeatureValues, &out.FeatureValues - *out = make([]Model_ClusteringMetrics_Cluster_FeatureValue, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.Count != nil { - in, out := &in.Count, &out.Count - *out = new(int64) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Model_ClusteringMetrics_Cluster. -func (in *Model_ClusteringMetrics_Cluster) DeepCopy() *Model_ClusteringMetrics_Cluster { - if in == nil { - return nil - } - out := new(Model_ClusteringMetrics_Cluster) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *Model_ClusteringMetrics_Cluster_FeatureValue) DeepCopyInto(out *Model_ClusteringMetrics_Cluster_FeatureValue) { - *out = *in - if in.FeatureColumn != nil { - in, out := &in.FeatureColumn, &out.FeatureColumn - *out = new(string) - **out = **in - } - if in.NumericalValue != nil { - in, out := &in.NumericalValue, &out.NumericalValue - *out = new(float64) - **out = **in - } - if in.CategoricalValue != nil { - in, out := &in.CategoricalValue, &out.CategoricalValue - *out = new(Model_ClusteringMetrics_Cluster_FeatureValue_CategoricalValue) - (*in).DeepCopyInto(*out) - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Model_ClusteringMetrics_Cluster_FeatureValue. -func (in *Model_ClusteringMetrics_Cluster_FeatureValue) DeepCopy() *Model_ClusteringMetrics_Cluster_FeatureValue { - if in == nil { - return nil - } - out := new(Model_ClusteringMetrics_Cluster_FeatureValue) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Model_ClusteringMetrics_Cluster_FeatureValue_CategoricalValue) DeepCopyInto(out *Model_ClusteringMetrics_Cluster_FeatureValue_CategoricalValue) { - *out = *in - if in.CategoryCounts != nil { - in, out := &in.CategoryCounts, &out.CategoryCounts - *out = make([]Model_ClusteringMetrics_Cluster_FeatureValue_CategoricalValue_CategoryCount, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Model_ClusteringMetrics_Cluster_FeatureValue_CategoricalValue. -func (in *Model_ClusteringMetrics_Cluster_FeatureValue_CategoricalValue) DeepCopy() *Model_ClusteringMetrics_Cluster_FeatureValue_CategoricalValue { - if in == nil { - return nil - } - out := new(Model_ClusteringMetrics_Cluster_FeatureValue_CategoricalValue) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Model_ClusteringMetrics_Cluster_FeatureValue_CategoricalValue_CategoryCount) DeepCopyInto(out *Model_ClusteringMetrics_Cluster_FeatureValue_CategoricalValue_CategoryCount) { - *out = *in - if in.Category != nil { - in, out := &in.Category, &out.Category - *out = new(string) - **out = **in - } - if in.Count != nil { - in, out := &in.Count, &out.Count - *out = new(int64) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Model_ClusteringMetrics_Cluster_FeatureValue_CategoricalValue_CategoryCount. -func (in *Model_ClusteringMetrics_Cluster_FeatureValue_CategoricalValue_CategoryCount) DeepCopy() *Model_ClusteringMetrics_Cluster_FeatureValue_CategoricalValue_CategoryCount { - if in == nil { - return nil - } - out := new(Model_ClusteringMetrics_Cluster_FeatureValue_CategoricalValue_CategoryCount) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *Model_DataSplitResult) DeepCopyInto(out *Model_DataSplitResult) { - *out = *in - if in.TrainingTable != nil { - in, out := &in.TrainingTable, &out.TrainingTable - *out = new(TableReference) - (*in).DeepCopyInto(*out) - } - if in.EvaluationTable != nil { - in, out := &in.EvaluationTable, &out.EvaluationTable - *out = new(TableReference) - (*in).DeepCopyInto(*out) - } - if in.TestTable != nil { - in, out := &in.TestTable, &out.TestTable - *out = new(TableReference) - (*in).DeepCopyInto(*out) - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Model_DataSplitResult. -func (in *Model_DataSplitResult) DeepCopy() *Model_DataSplitResult { - if in == nil { - return nil - } - out := new(Model_DataSplitResult) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Model_DimensionalityReductionMetrics) DeepCopyInto(out *Model_DimensionalityReductionMetrics) { - *out = *in - if in.TotalExplainedVarianceRatio != nil { - in, out := &in.TotalExplainedVarianceRatio, &out.TotalExplainedVarianceRatio - *out = new(float64) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Model_DimensionalityReductionMetrics. -func (in *Model_DimensionalityReductionMetrics) DeepCopy() *Model_DimensionalityReductionMetrics { - if in == nil { - return nil - } - out := new(Model_DimensionalityReductionMetrics) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Model_EvaluationMetrics) DeepCopyInto(out *Model_EvaluationMetrics) { - *out = *in - if in.RegressionMetrics != nil { - in, out := &in.RegressionMetrics, &out.RegressionMetrics - *out = new(Model_RegressionMetrics) - (*in).DeepCopyInto(*out) - } - if in.BinaryClassificationMetrics != nil { - in, out := &in.BinaryClassificationMetrics, &out.BinaryClassificationMetrics - *out = new(Model_BinaryClassificationMetrics) - (*in).DeepCopyInto(*out) - } - if in.MultiClassClassificationMetrics != nil { - in, out := &in.MultiClassClassificationMetrics, &out.MultiClassClassificationMetrics - *out = new(Model_MultiClassClassificationMetrics) - (*in).DeepCopyInto(*out) - } - if in.ClusteringMetrics != nil { - in, out := &in.ClusteringMetrics, &out.ClusteringMetrics - *out = new(Model_ClusteringMetrics) - (*in).DeepCopyInto(*out) - } - if in.RankingMetrics != nil { - in, out := &in.RankingMetrics, &out.RankingMetrics - *out = new(Model_RankingMetrics) - (*in).DeepCopyInto(*out) - } - if in.ArimaForecastingMetrics != nil { - in, out := &in.ArimaForecastingMetrics, &out.ArimaForecastingMetrics - *out = new(Model_ArimaForecastingMetrics) - (*in).DeepCopyInto(*out) - } - if in.DimensionalityReductionMetrics != nil { - in, out := &in.DimensionalityReductionMetrics, &out.DimensionalityReductionMetrics - *out = new(Model_DimensionalityReductionMetrics) - (*in).DeepCopyInto(*out) - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Model_EvaluationMetrics. -func (in *Model_EvaluationMetrics) DeepCopy() *Model_EvaluationMetrics { - if in == nil { - return nil - } - out := new(Model_EvaluationMetrics) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *Model_GlobalExplanation) DeepCopyInto(out *Model_GlobalExplanation) { - *out = *in - if in.Explanations != nil { - in, out := &in.Explanations, &out.Explanations - *out = make([]Model_GlobalExplanation_Explanation, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.ClassLabel != nil { - in, out := &in.ClassLabel, &out.ClassLabel - *out = new(string) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Model_GlobalExplanation. -func (in *Model_GlobalExplanation) DeepCopy() *Model_GlobalExplanation { - if in == nil { - return nil - } - out := new(Model_GlobalExplanation) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Model_GlobalExplanation_Explanation) DeepCopyInto(out *Model_GlobalExplanation_Explanation) { - *out = *in - if in.FeatureName != nil { - in, out := &in.FeatureName, &out.FeatureName - *out = new(string) - **out = **in - } - if in.Attribution != nil { - in, out := &in.Attribution, &out.Attribution - *out = new(float64) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Model_GlobalExplanation_Explanation. -func (in *Model_GlobalExplanation_Explanation) DeepCopy() *Model_GlobalExplanation_Explanation { - if in == nil { - return nil - } - out := new(Model_GlobalExplanation_Explanation) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Model_HparamSearchSpaces) DeepCopyInto(out *Model_HparamSearchSpaces) { - *out = *in - if in.LearnRate != nil { - in, out := &in.LearnRate, &out.LearnRate - *out = new(Model_float64HparamSearchSpace) - (*in).DeepCopyInto(*out) - } - if in.L1Reg != nil { - in, out := &in.L1Reg, &out.L1Reg - *out = new(Model_float64HparamSearchSpace) - (*in).DeepCopyInto(*out) - } - if in.L2Reg != nil { - in, out := &in.L2Reg, &out.L2Reg - *out = new(Model_float64HparamSearchSpace) - (*in).DeepCopyInto(*out) - } - if in.NumClusters != nil { - in, out := &in.NumClusters, &out.NumClusters - *out = new(Model_IntHparamSearchSpace) - (*in).DeepCopyInto(*out) - } - if in.NumFactors != nil { - in, out := &in.NumFactors, &out.NumFactors - *out = new(Model_IntHparamSearchSpace) - (*in).DeepCopyInto(*out) - } - if in.HiddenUnits != nil { - in, out := &in.HiddenUnits, &out.HiddenUnits - *out = new(Model_IntArrayHparamSearchSpace) - (*in).DeepCopyInto(*out) - } - if in.BatchSize != nil { - in, out := &in.BatchSize, &out.BatchSize - *out = new(Model_IntHparamSearchSpace) - (*in).DeepCopyInto(*out) - } - if in.Dropout != nil { - in, out := &in.Dropout, &out.Dropout - *out = new(Model_float64HparamSearchSpace) - (*in).DeepCopyInto(*out) - } - if in.MaxTreeDepth != nil { - in, out := &in.MaxTreeDepth, &out.MaxTreeDepth - *out = new(Model_IntHparamSearchSpace) - (*in).DeepCopyInto(*out) - } - if in.Subsample != nil { - in, out := &in.Subsample, &out.Subsample - *out = new(Model_float64HparamSearchSpace) - (*in).DeepCopyInto(*out) - } - if in.MinSplitLoss != nil { - in, out := &in.MinSplitLoss, &out.MinSplitLoss - *out = new(Model_float64HparamSearchSpace) - (*in).DeepCopyInto(*out) - } - if in.WalsAlpha != nil { - in, out := &in.WalsAlpha, &out.WalsAlpha - *out = new(Model_float64HparamSearchSpace) - (*in).DeepCopyInto(*out) - } - if in.BoosterType != nil { - in, out := &in.BoosterType, 
&out.BoosterType - *out = new(Model_StringHparamSearchSpace) - (*in).DeepCopyInto(*out) - } - if in.NumParallelTree != nil { - in, out := &in.NumParallelTree, &out.NumParallelTree - *out = new(Model_IntHparamSearchSpace) - (*in).DeepCopyInto(*out) - } - if in.DartNormalizeType != nil { - in, out := &in.DartNormalizeType, &out.DartNormalizeType - *out = new(Model_StringHparamSearchSpace) - (*in).DeepCopyInto(*out) - } - if in.TreeMethod != nil { - in, out := &in.TreeMethod, &out.TreeMethod - *out = new(Model_StringHparamSearchSpace) - (*in).DeepCopyInto(*out) - } - if in.MinTreeChildWeight != nil { - in, out := &in.MinTreeChildWeight, &out.MinTreeChildWeight - *out = new(Model_IntHparamSearchSpace) - (*in).DeepCopyInto(*out) - } - if in.ColsampleBytree != nil { - in, out := &in.ColsampleBytree, &out.ColsampleBytree - *out = new(Model_float64HparamSearchSpace) - (*in).DeepCopyInto(*out) - } - if in.ColsampleBylevel != nil { - in, out := &in.ColsampleBylevel, &out.ColsampleBylevel - *out = new(Model_float64HparamSearchSpace) - (*in).DeepCopyInto(*out) - } - if in.ColsampleBynode != nil { - in, out := &in.ColsampleBynode, &out.ColsampleBynode - *out = new(Model_float64HparamSearchSpace) - (*in).DeepCopyInto(*out) - } - if in.ActivationFn != nil { - in, out := &in.ActivationFn, &out.ActivationFn - *out = new(Model_StringHparamSearchSpace) - (*in).DeepCopyInto(*out) - } - if in.Optimizer != nil { - in, out := &in.Optimizer, &out.Optimizer - *out = new(Model_StringHparamSearchSpace) - (*in).DeepCopyInto(*out) - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Model_HparamSearchSpaces. -func (in *Model_HparamSearchSpaces) DeepCopy() *Model_HparamSearchSpaces { - if in == nil { - return nil - } - out := new(Model_HparamSearchSpaces) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Model_HparamTuningEnums) DeepCopyInto(out *Model_HparamTuningEnums) { - *out = *in -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Model_HparamTuningEnums. -func (in *Model_HparamTuningEnums) DeepCopy() *Model_HparamTuningEnums { - if in == nil { - return nil - } - out := new(Model_HparamTuningEnums) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *Model_HparamTuningTrial) DeepCopyInto(out *Model_HparamTuningTrial) { - *out = *in - if in.TrialID != nil { - in, out := &in.TrialID, &out.TrialID - *out = new(int64) - **out = **in - } - if in.StartTimeMs != nil { - in, out := &in.StartTimeMs, &out.StartTimeMs - *out = new(int64) - **out = **in - } - if in.EndTimeMs != nil { - in, out := &in.EndTimeMs, &out.EndTimeMs - *out = new(int64) - **out = **in - } - if in.Hparams != nil { - in, out := &in.Hparams, &out.Hparams - *out = new(Model_TrainingRun_TrainingOptions) - (*in).DeepCopyInto(*out) - } - if in.EvaluationMetrics != nil { - in, out := &in.EvaluationMetrics, &out.EvaluationMetrics - *out = new(Model_EvaluationMetrics) - (*in).DeepCopyInto(*out) - } - if in.Status != nil { - in, out := &in.Status, &out.Status - *out = new(string) - **out = **in - } - if in.ErrorMessage != nil { - in, out := &in.ErrorMessage, &out.ErrorMessage - *out = new(string) - **out = **in - } - if in.TrainingLoss != nil { - in, out := &in.TrainingLoss, &out.TrainingLoss - *out = new(float64) - **out = **in - } - if in.EvalLoss != nil { - in, out := &in.EvalLoss, &out.EvalLoss - *out = new(float64) - **out = **in - } - if in.HparamTuningEvaluationMetrics != nil { - in, out := &in.HparamTuningEvaluationMetrics, &out.HparamTuningEvaluationMetrics - *out = new(Model_EvaluationMetrics) - (*in).DeepCopyInto(*out) - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Model_HparamTuningTrial. -func (in *Model_HparamTuningTrial) DeepCopy() *Model_HparamTuningTrial { - if in == nil { - return nil - } - out := new(Model_HparamTuningTrial) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Model_IntArrayHparamSearchSpace) DeepCopyInto(out *Model_IntArrayHparamSearchSpace) { - *out = *in - if in.Candidates != nil { - in, out := &in.Candidates, &out.Candidates - *out = make([]Model_IntArrayHparamSearchSpace_IntArray, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Model_IntArrayHparamSearchSpace. -func (in *Model_IntArrayHparamSearchSpace) DeepCopy() *Model_IntArrayHparamSearchSpace { - if in == nil { - return nil - } - out := new(Model_IntArrayHparamSearchSpace) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Model_IntArrayHparamSearchSpace_IntArray) DeepCopyInto(out *Model_IntArrayHparamSearchSpace_IntArray) { - *out = *in - if in.Elements != nil { - in, out := &in.Elements, &out.Elements - *out = make([]int64, len(*in)) - copy(*out, *in) - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Model_IntArrayHparamSearchSpace_IntArray. -func (in *Model_IntArrayHparamSearchSpace_IntArray) DeepCopy() *Model_IntArrayHparamSearchSpace_IntArray { - if in == nil { - return nil - } - out := new(Model_IntArrayHparamSearchSpace_IntArray) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *Model_IntHparamSearchSpace) DeepCopyInto(out *Model_IntHparamSearchSpace) { - *out = *in - if in.Range != nil { - in, out := &in.Range, &out.Range - *out = new(Model_IntHparamSearchSpace_IntRange) - (*in).DeepCopyInto(*out) - } - if in.Candidates != nil { - in, out := &in.Candidates, &out.Candidates - *out = new(Model_IntHparamSearchSpace_IntCandidates) - (*in).DeepCopyInto(*out) - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Model_IntHparamSearchSpace. -func (in *Model_IntHparamSearchSpace) DeepCopy() *Model_IntHparamSearchSpace { - if in == nil { - return nil - } - out := new(Model_IntHparamSearchSpace) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Model_IntHparamSearchSpace_IntCandidates) DeepCopyInto(out *Model_IntHparamSearchSpace_IntCandidates) { - *out = *in - if in.Candidates != nil { - in, out := &in.Candidates, &out.Candidates - *out = make([]int64, len(*in)) - copy(*out, *in) - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Model_IntHparamSearchSpace_IntCandidates. -func (in *Model_IntHparamSearchSpace_IntCandidates) DeepCopy() *Model_IntHparamSearchSpace_IntCandidates { - if in == nil { - return nil - } - out := new(Model_IntHparamSearchSpace_IntCandidates) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Model_IntHparamSearchSpace_IntRange) DeepCopyInto(out *Model_IntHparamSearchSpace_IntRange) { - *out = *in - if in.Min != nil { - in, out := &in.Min, &out.Min - *out = new(int64) - **out = **in - } - if in.Max != nil { - in, out := &in.Max, &out.Max - *out = new(int64) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Model_IntHparamSearchSpace_IntRange. -func (in *Model_IntHparamSearchSpace_IntRange) DeepCopy() *Model_IntHparamSearchSpace_IntRange { - if in == nil { - return nil - } - out := new(Model_IntHparamSearchSpace_IntRange) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Model_KmeansEnums) DeepCopyInto(out *Model_KmeansEnums) { - *out = *in -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Model_KmeansEnums. -func (in *Model_KmeansEnums) DeepCopy() *Model_KmeansEnums { - if in == nil { - return nil - } - out := new(Model_KmeansEnums) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Model_ModelRegistryOptionEnums) DeepCopyInto(out *Model_ModelRegistryOptionEnums) { - *out = *in -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Model_ModelRegistryOptionEnums. -func (in *Model_ModelRegistryOptionEnums) DeepCopy() *Model_ModelRegistryOptionEnums { - if in == nil { - return nil - } - out := new(Model_ModelRegistryOptionEnums) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *Model_MultiClassClassificationMetrics) DeepCopyInto(out *Model_MultiClassClassificationMetrics) { - *out = *in - if in.AggregateClassificationMetrics != nil { - in, out := &in.AggregateClassificationMetrics, &out.AggregateClassificationMetrics - *out = new(Model_AggregateClassificationMetrics) - (*in).DeepCopyInto(*out) - } - if in.ConfusionMatrixList != nil { - in, out := &in.ConfusionMatrixList, &out.ConfusionMatrixList - *out = make([]Model_MultiClassClassificationMetrics_ConfusionMatrix, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Model_MultiClassClassificationMetrics. -func (in *Model_MultiClassClassificationMetrics) DeepCopy() *Model_MultiClassClassificationMetrics { - if in == nil { - return nil - } - out := new(Model_MultiClassClassificationMetrics) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Model_MultiClassClassificationMetrics_ConfusionMatrix) DeepCopyInto(out *Model_MultiClassClassificationMetrics_ConfusionMatrix) { - *out = *in - if in.ConfidenceThreshold != nil { - in, out := &in.ConfidenceThreshold, &out.ConfidenceThreshold - *out = new(float64) - **out = **in - } - if in.Rows != nil { - in, out := &in.Rows, &out.Rows - *out = make([]Model_MultiClassClassificationMetrics_ConfusionMatrix_Row, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Model_MultiClassClassificationMetrics_ConfusionMatrix. -func (in *Model_MultiClassClassificationMetrics_ConfusionMatrix) DeepCopy() *Model_MultiClassClassificationMetrics_ConfusionMatrix { - if in == nil { - return nil - } - out := new(Model_MultiClassClassificationMetrics_ConfusionMatrix) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Model_MultiClassClassificationMetrics_ConfusionMatrix_Entry) DeepCopyInto(out *Model_MultiClassClassificationMetrics_ConfusionMatrix_Entry) { - *out = *in - if in.PredictedLabel != nil { - in, out := &in.PredictedLabel, &out.PredictedLabel - *out = new(string) - **out = **in - } - if in.ItemCount != nil { - in, out := &in.ItemCount, &out.ItemCount - *out = new(int64) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Model_MultiClassClassificationMetrics_ConfusionMatrix_Entry. -func (in *Model_MultiClassClassificationMetrics_ConfusionMatrix_Entry) DeepCopy() *Model_MultiClassClassificationMetrics_ConfusionMatrix_Entry { - if in == nil { - return nil - } - out := new(Model_MultiClassClassificationMetrics_ConfusionMatrix_Entry) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *Model_MultiClassClassificationMetrics_ConfusionMatrix_Row) DeepCopyInto(out *Model_MultiClassClassificationMetrics_ConfusionMatrix_Row) { - *out = *in - if in.ActualLabel != nil { - in, out := &in.ActualLabel, &out.ActualLabel - *out = new(string) - **out = **in - } - if in.Entries != nil { - in, out := &in.Entries, &out.Entries - *out = make([]Model_MultiClassClassificationMetrics_ConfusionMatrix_Entry, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Model_MultiClassClassificationMetrics_ConfusionMatrix_Row. -func (in *Model_MultiClassClassificationMetrics_ConfusionMatrix_Row) DeepCopy() *Model_MultiClassClassificationMetrics_ConfusionMatrix_Row { - if in == nil { - return nil - } - out := new(Model_MultiClassClassificationMetrics_ConfusionMatrix_Row) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Model_PcaSolverOptionEnums) DeepCopyInto(out *Model_PcaSolverOptionEnums) { - *out = *in -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Model_PcaSolverOptionEnums. -func (in *Model_PcaSolverOptionEnums) DeepCopy() *Model_PcaSolverOptionEnums { - if in == nil { - return nil - } - out := new(Model_PcaSolverOptionEnums) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Model_RankingMetrics) DeepCopyInto(out *Model_RankingMetrics) { - *out = *in - if in.MeanAveragePrecision != nil { - in, out := &in.MeanAveragePrecision, &out.MeanAveragePrecision - *out = new(float64) - **out = **in - } - if in.MeanSquaredError != nil { - in, out := &in.MeanSquaredError, &out.MeanSquaredError - *out = new(float64) - **out = **in - } - if in.NormalizedDiscountedCumulativeGain != nil { - in, out := &in.NormalizedDiscountedCumulativeGain, &out.NormalizedDiscountedCumulativeGain - *out = new(float64) - **out = **in - } - if in.AverageRank != nil { - in, out := &in.AverageRank, &out.AverageRank - *out = new(float64) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Model_RankingMetrics. -func (in *Model_RankingMetrics) DeepCopy() *Model_RankingMetrics { - if in == nil { - return nil - } - out := new(Model_RankingMetrics) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Model_RegressionMetrics) DeepCopyInto(out *Model_RegressionMetrics) { - *out = *in - if in.MeanAbsoluteError != nil { - in, out := &in.MeanAbsoluteError, &out.MeanAbsoluteError - *out = new(float64) - **out = **in - } - if in.MeanSquaredError != nil { - in, out := &in.MeanSquaredError, &out.MeanSquaredError - *out = new(float64) - **out = **in - } - if in.MeanSquaredLogError != nil { - in, out := &in.MeanSquaredLogError, &out.MeanSquaredLogError - *out = new(float64) - **out = **in - } - if in.MedianAbsoluteError != nil { - in, out := &in.MedianAbsoluteError, &out.MedianAbsoluteError - *out = new(float64) - **out = **in - } - if in.RSquared != nil { - in, out := &in.RSquared, &out.RSquared - *out = new(float64) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Model_RegressionMetrics. 
-func (in *Model_RegressionMetrics) DeepCopy() *Model_RegressionMetrics { - if in == nil { - return nil - } - out := new(Model_RegressionMetrics) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Model_SeasonalPeriod) DeepCopyInto(out *Model_SeasonalPeriod) { - *out = *in -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Model_SeasonalPeriod. -func (in *Model_SeasonalPeriod) DeepCopy() *Model_SeasonalPeriod { - if in == nil { - return nil - } - out := new(Model_SeasonalPeriod) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Model_StringHparamSearchSpace) DeepCopyInto(out *Model_StringHparamSearchSpace) { - *out = *in - if in.Candidates != nil { - in, out := &in.Candidates, &out.Candidates - *out = make([]string, len(*in)) - copy(*out, *in) - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Model_StringHparamSearchSpace. -func (in *Model_StringHparamSearchSpace) DeepCopy() *Model_StringHparamSearchSpace { - if in == nil { - return nil - } - out := new(Model_StringHparamSearchSpace) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Model_TrainingRun) DeepCopyInto(out *Model_TrainingRun) { - *out = *in - if in.TrainingOptions != nil { - in, out := &in.TrainingOptions, &out.TrainingOptions - *out = new(Model_TrainingRun_TrainingOptions) - (*in).DeepCopyInto(*out) - } - if in.StartTime != nil { - in, out := &in.StartTime, &out.StartTime - *out = new(string) - **out = **in - } - if in.Results != nil { - in, out := &in.Results, &out.Results - *out = make([]Model_TrainingRun_IterationResult, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.EvaluationMetrics != nil { - in, out := &in.EvaluationMetrics, &out.EvaluationMetrics - *out = new(Model_EvaluationMetrics) - (*in).DeepCopyInto(*out) - } - if in.DataSplitResult != nil { - in, out := &in.DataSplitResult, &out.DataSplitResult - *out = new(Model_DataSplitResult) - (*in).DeepCopyInto(*out) - } - if in.ModelLevelGlobalExplanation != nil { - in, out := &in.ModelLevelGlobalExplanation, &out.ModelLevelGlobalExplanation - *out = new(Model_GlobalExplanation) - (*in).DeepCopyInto(*out) - } - if in.ClassLevelGlobalExplanations != nil { - in, out := &in.ClassLevelGlobalExplanations, &out.ClassLevelGlobalExplanations - *out = make([]Model_GlobalExplanation, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.VertexAiModelID != nil { - in, out := &in.VertexAiModelID, &out.VertexAiModelID - *out = new(string) - **out = **in - } - if in.VertexAiModelVersion != nil { - in, out := &in.VertexAiModelVersion, &out.VertexAiModelVersion - *out = new(string) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Model_TrainingRun. -func (in *Model_TrainingRun) DeepCopy() *Model_TrainingRun { - if in == nil { - return nil - } - out := new(Model_TrainingRun) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *Model_TrainingRun_IterationResult) DeepCopyInto(out *Model_TrainingRun_IterationResult) { - *out = *in - if in.Index != nil { - in, out := &in.Index, &out.Index - *out = new(int32) - **out = **in - } - if in.DurationMs != nil { - in, out := &in.DurationMs, &out.DurationMs - *out = new(int64) - **out = **in - } - if in.TrainingLoss != nil { - in, out := &in.TrainingLoss, &out.TrainingLoss - *out = new(float64) - **out = **in - } - if in.EvalLoss != nil { - in, out := &in.EvalLoss, &out.EvalLoss - *out = new(float64) - **out = **in - } - if in.LearnRate != nil { - in, out := &in.LearnRate, &out.LearnRate - *out = new(float64) - **out = **in - } - if in.ClusterInfos != nil { - in, out := &in.ClusterInfos, &out.ClusterInfos - *out = make([]Model_TrainingRun_IterationResult_ClusterInfo, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.ArimaResult != nil { - in, out := &in.ArimaResult, &out.ArimaResult - *out = new(Model_TrainingRun_IterationResult_ArimaResult) - (*in).DeepCopyInto(*out) - } - if in.PrincipalComponentInfos != nil { - in, out := &in.PrincipalComponentInfos, &out.PrincipalComponentInfos - *out = make([]Model_TrainingRun_IterationResult_PrincipalComponentInfo, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Model_TrainingRun_IterationResult. -func (in *Model_TrainingRun_IterationResult) DeepCopy() *Model_TrainingRun_IterationResult { - if in == nil { - return nil - } - out := new(Model_TrainingRun_IterationResult) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Model_TrainingRun_IterationResult_ArimaResult) DeepCopyInto(out *Model_TrainingRun_IterationResult_ArimaResult) { - *out = *in - if in.ArimaModelInfo != nil { - in, out := &in.ArimaModelInfo, &out.ArimaModelInfo - *out = make([]Model_TrainingRun_IterationResult_ArimaResult_ArimaModelInfo, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.SeasonalPeriods != nil { - in, out := &in.SeasonalPeriods, &out.SeasonalPeriods - *out = make([]string, len(*in)) - copy(*out, *in) - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Model_TrainingRun_IterationResult_ArimaResult. -func (in *Model_TrainingRun_IterationResult_ArimaResult) DeepCopy() *Model_TrainingRun_IterationResult_ArimaResult { - if in == nil { - return nil - } - out := new(Model_TrainingRun_IterationResult_ArimaResult) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *Model_TrainingRun_IterationResult_ArimaResult_ArimaCoefficients) DeepCopyInto(out *Model_TrainingRun_IterationResult_ArimaResult_ArimaCoefficients) { - *out = *in - if in.AutoRegressiveCoefficients != nil { - in, out := &in.AutoRegressiveCoefficients, &out.AutoRegressiveCoefficients - *out = make([]float64, len(*in)) - copy(*out, *in) - } - if in.MovingAverageCoefficients != nil { - in, out := &in.MovingAverageCoefficients, &out.MovingAverageCoefficients - *out = make([]float64, len(*in)) - copy(*out, *in) - } - if in.InterceptCoefficient != nil { - in, out := &in.InterceptCoefficient, &out.InterceptCoefficient - *out = new(float64) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Model_TrainingRun_IterationResult_ArimaResult_ArimaCoefficients. -func (in *Model_TrainingRun_IterationResult_ArimaResult_ArimaCoefficients) DeepCopy() *Model_TrainingRun_IterationResult_ArimaResult_ArimaCoefficients { - if in == nil { - return nil - } - out := new(Model_TrainingRun_IterationResult_ArimaResult_ArimaCoefficients) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Model_TrainingRun_IterationResult_ArimaResult_ArimaModelInfo) DeepCopyInto(out *Model_TrainingRun_IterationResult_ArimaResult_ArimaModelInfo) { - *out = *in - if in.NonSeasonalOrder != nil { - in, out := &in.NonSeasonalOrder, &out.NonSeasonalOrder - *out = new(Model_ArimaOrder) - (*in).DeepCopyInto(*out) - } - if in.ArimaCoefficients != nil { - in, out := &in.ArimaCoefficients, &out.ArimaCoefficients - *out = new(Model_TrainingRun_IterationResult_ArimaResult_ArimaCoefficients) - (*in).DeepCopyInto(*out) - } - if in.ArimaFittingMetrics != nil { - in, out := &in.ArimaFittingMetrics, &out.ArimaFittingMetrics - *out = new(Model_ArimaFittingMetrics) - (*in).DeepCopyInto(*out) - } - if in.HasDrift != nil { - in, out := &in.HasDrift, &out.HasDrift - *out = new(bool) - **out = **in - } - if in.TimeSeriesID != nil { - in, out := &in.TimeSeriesID, &out.TimeSeriesID - *out = new(string) - **out = **in - } - if in.TimeSeriesIds != nil { - in, out := &in.TimeSeriesIds, &out.TimeSeriesIds - *out = make([]string, len(*in)) - copy(*out, *in) - } - if in.SeasonalPeriods != nil { - in, out := &in.SeasonalPeriods, &out.SeasonalPeriods - *out = make([]string, len(*in)) - copy(*out, *in) - } - if in.HasHolidayEffect != nil { - in, out := &in.HasHolidayEffect, &out.HasHolidayEffect - *out = new(bool) - **out = **in - } - if in.HasSpikesAndDips != nil { - in, out := &in.HasSpikesAndDips, &out.HasSpikesAndDips - *out = new(bool) - **out = **in - } - if in.HasStepChanges != nil { - in, out := &in.HasStepChanges, &out.HasStepChanges - *out = new(bool) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Model_TrainingRun_IterationResult_ArimaResult_ArimaModelInfo. -func (in *Model_TrainingRun_IterationResult_ArimaResult_ArimaModelInfo) DeepCopy() *Model_TrainingRun_IterationResult_ArimaResult_ArimaModelInfo { - if in == nil { - return nil - } - out := new(Model_TrainingRun_IterationResult_ArimaResult_ArimaModelInfo) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *Model_TrainingRun_IterationResult_ClusterInfo) DeepCopyInto(out *Model_TrainingRun_IterationResult_ClusterInfo) { - *out = *in - if in.CentroidID != nil { - in, out := &in.CentroidID, &out.CentroidID - *out = new(int64) - **out = **in - } - if in.ClusterRadius != nil { - in, out := &in.ClusterRadius, &out.ClusterRadius - *out = new(float64) - **out = **in - } - if in.ClusterSize != nil { - in, out := &in.ClusterSize, &out.ClusterSize - *out = new(int64) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Model_TrainingRun_IterationResult_ClusterInfo. -func (in *Model_TrainingRun_IterationResult_ClusterInfo) DeepCopy() *Model_TrainingRun_IterationResult_ClusterInfo { - if in == nil { - return nil - } - out := new(Model_TrainingRun_IterationResult_ClusterInfo) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Model_TrainingRun_IterationResult_PrincipalComponentInfo) DeepCopyInto(out *Model_TrainingRun_IterationResult_PrincipalComponentInfo) { - *out = *in - if in.PrincipalComponentID != nil { - in, out := &in.PrincipalComponentID, &out.PrincipalComponentID - *out = new(int64) - **out = **in - } - if in.ExplainedVariance != nil { - in, out := &in.ExplainedVariance, &out.ExplainedVariance - *out = new(float64) - **out = **in - } - if in.ExplainedVarianceRatio != nil { - in, out := &in.ExplainedVarianceRatio, &out.ExplainedVarianceRatio - *out = new(float64) - **out = **in - } - if in.CumulativeExplainedVarianceRatio != nil { - in, out := &in.CumulativeExplainedVarianceRatio, &out.CumulativeExplainedVarianceRatio - *out = new(float64) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Model_TrainingRun_IterationResult_PrincipalComponentInfo. -func (in *Model_TrainingRun_IterationResult_PrincipalComponentInfo) DeepCopy() *Model_TrainingRun_IterationResult_PrincipalComponentInfo { - if in == nil { - return nil - } - out := new(Model_TrainingRun_IterationResult_PrincipalComponentInfo) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *Model_TrainingRun_TrainingOptions) DeepCopyInto(out *Model_TrainingRun_TrainingOptions) { - *out = *in - if in.MaxIterations != nil { - in, out := &in.MaxIterations, &out.MaxIterations - *out = new(int64) - **out = **in - } - if in.LossType != nil { - in, out := &in.LossType, &out.LossType - *out = new(string) - **out = **in - } - if in.LearnRate != nil { - in, out := &in.LearnRate, &out.LearnRate - *out = new(float64) - **out = **in - } - if in.L1Regularization != nil { - in, out := &in.L1Regularization, &out.L1Regularization - *out = new(float64) - **out = **in - } - if in.L2Regularization != nil { - in, out := &in.L2Regularization, &out.L2Regularization - *out = new(float64) - **out = **in - } - if in.MinRelativeProgress != nil { - in, out := &in.MinRelativeProgress, &out.MinRelativeProgress - *out = new(float64) - **out = **in - } - if in.WarmStart != nil { - in, out := &in.WarmStart, &out.WarmStart - *out = new(bool) - **out = **in - } - if in.EarlyStop != nil { - in, out := &in.EarlyStop, &out.EarlyStop - *out = new(bool) - **out = **in - } - if in.InputLabelColumns != nil { - in, out := &in.InputLabelColumns, &out.InputLabelColumns - *out = make([]string, len(*in)) - copy(*out, *in) - } - if in.DataSplitMethod != nil { - in, out := &in.DataSplitMethod, &out.DataSplitMethod - *out = new(string) - **out = **in - } - if in.DataSplitEvalFraction != nil { - in, out := &in.DataSplitEvalFraction, &out.DataSplitEvalFraction - *out = new(float64) - **out = **in - } - if in.DataSplitColumn != nil { - in, out := &in.DataSplitColumn, &out.DataSplitColumn - *out = new(string) - **out = **in - } - if in.LearnRateStrategy != nil { - in, out := &in.LearnRateStrategy, &out.LearnRateStrategy - *out = new(string) - **out = **in - } - if in.InitialLearnRate != nil { - in, out := &in.InitialLearnRate, &out.InitialLearnRate - *out = new(float64) - **out = **in - } - if in.UserColumn != nil { - in, out := &in.UserColumn, &out.UserColumn - *out = new(string) - **out = **in - } - if in.ItemColumn != nil { - in, out := &in.ItemColumn, &out.ItemColumn - *out = new(string) - **out = **in - } - if in.DistanceType != nil { - in, out := &in.DistanceType, &out.DistanceType - *out = new(string) - **out = **in - } - if in.NumClusters != nil { - in, out := &in.NumClusters, &out.NumClusters - *out = new(int64) - **out = **in - } - if in.ModelUri != nil { - in, out := &in.ModelUri, &out.ModelUri - *out = new(string) - **out = **in - } - if in.OptimizationStrategy != nil { - in, out := &in.OptimizationStrategy, &out.OptimizationStrategy - *out = new(string) - **out = **in - } - if in.HiddenUnits != nil { - in, out := &in.HiddenUnits, &out.HiddenUnits - *out = make([]int64, len(*in)) - copy(*out, *in) - } - if in.BatchSize != nil { - in, out := &in.BatchSize, &out.BatchSize - *out = new(int64) - **out = **in - } - if in.Dropout != nil { - in, out := &in.Dropout, &out.Dropout - *out = new(float64) - **out = **in - } - if in.MaxTreeDepth != nil { - in, out := &in.MaxTreeDepth, &out.MaxTreeDepth - *out = new(int64) - **out = **in - } - if in.Subsample != nil { - in, out := &in.Subsample, &out.Subsample - *out = new(float64) - **out = **in - } - if in.MinSplitLoss != nil { - in, out := &in.MinSplitLoss, &out.MinSplitLoss - *out = new(float64) - **out = **in - } - if in.BoosterType != nil { - in, out := &in.BoosterType, &out.BoosterType - *out = new(string) - **out = **in - } - if in.NumParallelTree != nil { - in, out := &in.NumParallelTree, &out.NumParallelTree - *out = new(int64) - **out = **in - } - if 
in.DartNormalizeType != nil { - in, out := &in.DartNormalizeType, &out.DartNormalizeType - *out = new(string) - **out = **in - } - if in.TreeMethod != nil { - in, out := &in.TreeMethod, &out.TreeMethod - *out = new(string) - **out = **in - } - if in.MinTreeChildWeight != nil { - in, out := &in.MinTreeChildWeight, &out.MinTreeChildWeight - *out = new(int64) - **out = **in - } - if in.ColsampleBytree != nil { - in, out := &in.ColsampleBytree, &out.ColsampleBytree - *out = new(float64) - **out = **in - } - if in.ColsampleBylevel != nil { - in, out := &in.ColsampleBylevel, &out.ColsampleBylevel - *out = new(float64) - **out = **in - } - if in.ColsampleBynode != nil { - in, out := &in.ColsampleBynode, &out.ColsampleBynode - *out = new(float64) - **out = **in - } - if in.NumFactors != nil { - in, out := &in.NumFactors, &out.NumFactors - *out = new(int64) - **out = **in - } - if in.FeedbackType != nil { - in, out := &in.FeedbackType, &out.FeedbackType - *out = new(string) - **out = **in - } - if in.WalsAlpha != nil { - in, out := &in.WalsAlpha, &out.WalsAlpha - *out = new(float64) - **out = **in - } - if in.KmeansInitializationMethod != nil { - in, out := &in.KmeansInitializationMethod, &out.KmeansInitializationMethod - *out = new(string) - **out = **in - } - if in.KmeansInitializationColumn != nil { - in, out := &in.KmeansInitializationColumn, &out.KmeansInitializationColumn - *out = new(string) - **out = **in - } - if in.TimeSeriesTimestampColumn != nil { - in, out := &in.TimeSeriesTimestampColumn, &out.TimeSeriesTimestampColumn - *out = new(string) - **out = **in - } - if in.TimeSeriesDataColumn != nil { - in, out := &in.TimeSeriesDataColumn, &out.TimeSeriesDataColumn - *out = new(string) - **out = **in - } - if in.AutoArima != nil { - in, out := &in.AutoArima, &out.AutoArima - *out = new(bool) - **out = **in - } - if in.NonSeasonalOrder != nil { - in, out := &in.NonSeasonalOrder, &out.NonSeasonalOrder - *out = new(Model_ArimaOrder) - (*in).DeepCopyInto(*out) - } - if in.DataFrequency != nil { - in, out := &in.DataFrequency, &out.DataFrequency - *out = new(string) - **out = **in - } - if in.CalculatePValues != nil { - in, out := &in.CalculatePValues, &out.CalculatePValues - *out = new(bool) - **out = **in - } - if in.IncludeDrift != nil { - in, out := &in.IncludeDrift, &out.IncludeDrift - *out = new(bool) - **out = **in - } - if in.HolidayRegion != nil { - in, out := &in.HolidayRegion, &out.HolidayRegion - *out = new(string) - **out = **in - } - if in.HolidayRegions != nil { - in, out := &in.HolidayRegions, &out.HolidayRegions - *out = make([]string, len(*in)) - copy(*out, *in) - } - if in.TimeSeriesIDColumn != nil { - in, out := &in.TimeSeriesIDColumn, &out.TimeSeriesIDColumn - *out = new(string) - **out = **in - } - if in.TimeSeriesIDColumns != nil { - in, out := &in.TimeSeriesIDColumns, &out.TimeSeriesIDColumns - *out = make([]string, len(*in)) - copy(*out, *in) - } - if in.Horizon != nil { - in, out := &in.Horizon, &out.Horizon - *out = new(int64) - **out = **in - } - if in.AutoArimaMaxOrder != nil { - in, out := &in.AutoArimaMaxOrder, &out.AutoArimaMaxOrder - *out = new(int64) - **out = **in - } - if in.AutoArimaMinOrder != nil { - in, out := &in.AutoArimaMinOrder, &out.AutoArimaMinOrder - *out = new(int64) - **out = **in - } - if in.NumTrials != nil { - in, out := &in.NumTrials, &out.NumTrials - *out = new(int64) - **out = **in - } - if in.MaxParallelTrials != nil { - in, out := &in.MaxParallelTrials, &out.MaxParallelTrials - *out = new(int64) - **out = **in - } - if 
in.HparamTuningObjectives != nil { - in, out := &in.HparamTuningObjectives, &out.HparamTuningObjectives - *out = make([]string, len(*in)) - copy(*out, *in) - } - if in.DecomposeTimeSeries != nil { - in, out := &in.DecomposeTimeSeries, &out.DecomposeTimeSeries - *out = new(bool) - **out = **in - } - if in.CleanSpikesAndDips != nil { - in, out := &in.CleanSpikesAndDips, &out.CleanSpikesAndDips - *out = new(bool) - **out = **in - } - if in.AdjustStepChanges != nil { - in, out := &in.AdjustStepChanges, &out.AdjustStepChanges - *out = new(bool) - **out = **in - } - if in.EnableGlobalExplain != nil { - in, out := &in.EnableGlobalExplain, &out.EnableGlobalExplain - *out = new(bool) - **out = **in - } - if in.SampledShapleyNumPaths != nil { - in, out := &in.SampledShapleyNumPaths, &out.SampledShapleyNumPaths - *out = new(int64) - **out = **in - } - if in.IntegratedGradientsNumSteps != nil { - in, out := &in.IntegratedGradientsNumSteps, &out.IntegratedGradientsNumSteps - *out = new(int64) - **out = **in - } - if in.CategoryEncodingMethod != nil { - in, out := &in.CategoryEncodingMethod, &out.CategoryEncodingMethod - *out = new(string) - **out = **in - } - if in.TfVersion != nil { - in, out := &in.TfVersion, &out.TfVersion - *out = new(string) - **out = **in - } - if in.ColorSpace != nil { - in, out := &in.ColorSpace, &out.ColorSpace - *out = new(string) - **out = **in - } - if in.InstanceWeightColumn != nil { - in, out := &in.InstanceWeightColumn, &out.InstanceWeightColumn - *out = new(string) - **out = **in - } - if in.TrendSmoothingWindowSize != nil { - in, out := &in.TrendSmoothingWindowSize, &out.TrendSmoothingWindowSize - *out = new(int64) - **out = **in - } - if in.TimeSeriesLengthFraction != nil { - in, out := &in.TimeSeriesLengthFraction, &out.TimeSeriesLengthFraction - *out = new(float64) - **out = **in - } - if in.MinTimeSeriesLength != nil { - in, out := &in.MinTimeSeriesLength, &out.MinTimeSeriesLength - *out = new(int64) - **out = **in - } - if in.MaxTimeSeriesLength != nil { - in, out := &in.MaxTimeSeriesLength, &out.MaxTimeSeriesLength - *out = new(int64) - **out = **in - } - if in.XgboostVersion != nil { - in, out := &in.XgboostVersion, &out.XgboostVersion - *out = new(string) - **out = **in - } - if in.ApproxGlobalFeatureContrib != nil { - in, out := &in.ApproxGlobalFeatureContrib, &out.ApproxGlobalFeatureContrib - *out = new(bool) - **out = **in - } - if in.FitIntercept != nil { - in, out := &in.FitIntercept, &out.FitIntercept - *out = new(bool) - **out = **in - } - if in.NumPrincipalComponents != nil { - in, out := &in.NumPrincipalComponents, &out.NumPrincipalComponents - *out = new(int64) - **out = **in - } - if in.PcaExplainedVarianceRatio != nil { - in, out := &in.PcaExplainedVarianceRatio, &out.PcaExplainedVarianceRatio - *out = new(float64) - **out = **in - } - if in.ScaleFeatures != nil { - in, out := &in.ScaleFeatures, &out.ScaleFeatures - *out = new(bool) - **out = **in - } - if in.PcaSolver != nil { - in, out := &in.PcaSolver, &out.PcaSolver - *out = new(string) - **out = **in - } - if in.AutoClassWeights != nil { - in, out := &in.AutoClassWeights, &out.AutoClassWeights - *out = new(bool) - **out = **in - } - if in.ActivationFn != nil { - in, out := &in.ActivationFn, &out.ActivationFn - *out = new(string) - **out = **in - } - if in.Optimizer != nil { - in, out := &in.Optimizer, &out.Optimizer - *out = new(string) - **out = **in - } - if in.BudgetHours != nil { - in, out := &in.BudgetHours, &out.BudgetHours - *out = new(float64) - **out = **in - } - if 
in.StandardizeFeatures != nil { - in, out := &in.StandardizeFeatures, &out.StandardizeFeatures - *out = new(bool) - **out = **in - } - if in.L1RegActivation != nil { - in, out := &in.L1RegActivation, &out.L1RegActivation - *out = new(float64) - **out = **in - } - if in.ModelRegistry != nil { - in, out := &in.ModelRegistry, &out.ModelRegistry - *out = new(string) - **out = **in - } - if in.VertexAiModelVersionAliases != nil { - in, out := &in.VertexAiModelVersionAliases, &out.VertexAiModelVersionAliases - *out = make([]string, len(*in)) - copy(*out, *in) - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Model_TrainingRun_TrainingOptions. -func (in *Model_TrainingRun_TrainingOptions) DeepCopy() *Model_TrainingRun_TrainingOptions { - if in == nil { - return nil - } - out := new(Model_TrainingRun_TrainingOptions) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Model_float64HparamSearchSpace) DeepCopyInto(out *Model_float64HparamSearchSpace) { - *out = *in - if in.Range != nil { - in, out := &in.Range, &out.Range - *out = new(Model_float64HparamSearchSpace_float64Range) - (*in).DeepCopyInto(*out) - } - if in.Candidates != nil { - in, out := &in.Candidates, &out.Candidates - *out = new(Model_float64HparamSearchSpace_float64Candidates) - (*in).DeepCopyInto(*out) - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Model_float64HparamSearchSpace. -func (in *Model_float64HparamSearchSpace) DeepCopy() *Model_float64HparamSearchSpace { - if in == nil { - return nil - } - out := new(Model_float64HparamSearchSpace) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Model_float64HparamSearchSpace_float64Candidates) DeepCopyInto(out *Model_float64HparamSearchSpace_float64Candidates) { - *out = *in - if in.Candidates != nil { - in, out := &in.Candidates, &out.Candidates - *out = make([]float64, len(*in)) - copy(*out, *in) - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Model_float64HparamSearchSpace_float64Candidates. -func (in *Model_float64HparamSearchSpace_float64Candidates) DeepCopy() *Model_float64HparamSearchSpace_float64Candidates { - if in == nil { - return nil - } - out := new(Model_float64HparamSearchSpace_float64Candidates) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Model_float64HparamSearchSpace_float64Range) DeepCopyInto(out *Model_float64HparamSearchSpace_float64Range) { - *out = *in - if in.Min != nil { - in, out := &in.Min, &out.Min - *out = new(float64) - **out = **in - } - if in.Max != nil { - in, out := &in.Max, &out.Max - *out = new(float64) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Model_float64HparamSearchSpace_float64Range. -func (in *Model_float64HparamSearchSpace_float64Range) DeepCopy() *Model_float64HparamSearchSpace_float64Range { - if in == nil { - return nil - } - out := new(Model_float64HparamSearchSpace_float64Range) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *ParquetOptions) DeepCopyInto(out *ParquetOptions) { - *out = *in - if in.EnumAsString != nil { - in, out := &in.EnumAsString, &out.EnumAsString - *out = new(bool) - **out = **in - } - if in.EnableListInference != nil { - in, out := &in.EnableListInference, &out.EnableListInference - *out = new(bool) - **out = **in - } - if in.MapTargetType != nil { - in, out := &in.MapTargetType, &out.MapTargetType - *out = new(string) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ParquetOptions. -func (in *ParquetOptions) DeepCopy() *ParquetOptions { - if in == nil { - return nil - } - out := new(ParquetOptions) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *PartitionSkew) DeepCopyInto(out *PartitionSkew) { - *out = *in - if in.SkewSources != nil { - in, out := &in.SkewSources, &out.SkewSources - *out = make([]PartitionSkew_SkewSource, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PartitionSkew. -func (in *PartitionSkew) DeepCopy() *PartitionSkew { - if in == nil { - return nil - } - out := new(PartitionSkew) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *PartitionSkew_SkewSource) DeepCopyInto(out *PartitionSkew_SkewSource) { - *out = *in - if in.StageID != nil { - in, out := &in.StageID, &out.StageID - *out = new(int64) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PartitionSkew_SkewSource. -func (in *PartitionSkew_SkewSource) DeepCopy() *PartitionSkew_SkewSource { - if in == nil { - return nil - } - out := new(PartitionSkew_SkewSource) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *PartitionedColumn) DeepCopyInto(out *PartitionedColumn) { - *out = *in - if in.Field != nil { - in, out := &in.Field, &out.Field - *out = new(string) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PartitionedColumn. -func (in *PartitionedColumn) DeepCopy() *PartitionedColumn { - if in == nil { - return nil - } - out := new(PartitionedColumn) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *PartitioningDefinition) DeepCopyInto(out *PartitioningDefinition) { - *out = *in - if in.PartitionedColumn != nil { - in, out := &in.PartitionedColumn, &out.PartitionedColumn - *out = make([]PartitionedColumn, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PartitioningDefinition. -func (in *PartitioningDefinition) DeepCopy() *PartitioningDefinition { - if in == nil { - return nil - } - out := new(PartitioningDefinition) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *PerformanceInsights) DeepCopyInto(out *PerformanceInsights) { - *out = *in - if in.AvgPreviousExecutionMs != nil { - in, out := &in.AvgPreviousExecutionMs, &out.AvgPreviousExecutionMs - *out = new(int64) - **out = **in - } - if in.StagePerformanceStandaloneInsights != nil { - in, out := &in.StagePerformanceStandaloneInsights, &out.StagePerformanceStandaloneInsights - *out = make([]StagePerformanceStandaloneInsight, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.StagePerformanceChangeInsights != nil { - in, out := &in.StagePerformanceChangeInsights, &out.StagePerformanceChangeInsights - *out = make([]StagePerformanceChangeInsight, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PerformanceInsights. -func (in *PerformanceInsights) DeepCopy() *PerformanceInsights { - if in == nil { - return nil - } - out := new(PerformanceInsights) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *PrimaryKey) DeepCopyInto(out *PrimaryKey) { - *out = *in - if in.Columns != nil { - in, out := &in.Columns, &out.Columns - *out = make([]string, len(*in)) - copy(*out, *in) - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PrimaryKey. -func (in *PrimaryKey) DeepCopy() *PrimaryKey { - if in == nil { - return nil - } - out := new(PrimaryKey) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *PrivacyPolicy) DeepCopyInto(out *PrivacyPolicy) { - *out = *in - if in.AggregationThresholdPolicy != nil { - in, out := &in.AggregationThresholdPolicy, &out.AggregationThresholdPolicy - *out = new(AggregationThresholdPolicy) - (*in).DeepCopyInto(*out) - } - if in.DifferentialPrivacyPolicy != nil { - in, out := &in.DifferentialPrivacyPolicy, &out.DifferentialPrivacyPolicy - *out = new(DifferentialPrivacyPolicy) - (*in).DeepCopyInto(*out) - } - if in.JoinRestrictionPolicy != nil { - in, out := &in.JoinRestrictionPolicy, &out.JoinRestrictionPolicy - *out = new(JoinRestrictionPolicy) - (*in).DeepCopyInto(*out) - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PrivacyPolicy. -func (in *PrivacyPolicy) DeepCopy() *PrivacyPolicy { - if in == nil { - return nil - } - out := new(PrivacyPolicy) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *QueryInfo) DeepCopyInto(out *QueryInfo) { - *out = *in - if in.OptimizationDetails != nil { - in, out := &in.OptimizationDetails, &out.OptimizationDetails - *out = new(google_protobuf_Struct) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new QueryInfo. -func (in *QueryInfo) DeepCopy() *QueryInfo { - if in == nil { - return nil - } - out := new(QueryInfo) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *QueryParameter) DeepCopyInto(out *QueryParameter) { - *out = *in - if in.Name != nil { - in, out := &in.Name, &out.Name - *out = new(string) - **out = **in - } - if in.ParameterType != nil { - in, out := &in.ParameterType, &out.ParameterType - *out = new(QueryParameterType) - (*in).DeepCopyInto(*out) - } - if in.ParameterValue != nil { - in, out := &in.ParameterValue, &out.ParameterValue - *out = new(QueryParameterValue) - (*in).DeepCopyInto(*out) - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new QueryParameter. -func (in *QueryParameter) DeepCopy() *QueryParameter { - if in == nil { - return nil - } - out := new(QueryParameter) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *QueryParameterStructType) DeepCopyInto(out *QueryParameterStructType) { - *out = *in - if in.Name != nil { - in, out := &in.Name, &out.Name - *out = new(string) - **out = **in - } - if in.Type != nil { - in, out := &in.Type, &out.Type - *out = new(QueryParameterType) - (*in).DeepCopyInto(*out) - } - if in.Description != nil { - in, out := &in.Description, &out.Description - *out = new(string) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new QueryParameterStructType. -func (in *QueryParameterStructType) DeepCopy() *QueryParameterStructType { - if in == nil { - return nil - } - out := new(QueryParameterStructType) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *QueryParameterType) DeepCopyInto(out *QueryParameterType) { - *out = *in - if in.Type != nil { - in, out := &in.Type, &out.Type - *out = new(string) - **out = **in - } - if in.ArrayType != nil { - in, out := &in.ArrayType, &out.ArrayType - *out = new(QueryParameterType) - (*in).DeepCopyInto(*out) - } - if in.StructTypes != nil { - in, out := &in.StructTypes, &out.StructTypes - *out = make([]QueryParameterStructType, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.RangeElementType != nil { - in, out := &in.RangeElementType, &out.RangeElementType - *out = new(QueryParameterType) - (*in).DeepCopyInto(*out) - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new QueryParameterType. -func (in *QueryParameterType) DeepCopy() *QueryParameterType { - if in == nil { - return nil - } - out := new(QueryParameterType) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *QueryParameterValue) DeepCopyInto(out *QueryParameterValue) { - *out = *in - if in.Value != nil { - in, out := &in.Value, &out.Value - *out = new(string) - **out = **in - } - if in.ArrayValues != nil { - in, out := &in.ArrayValues, &out.ArrayValues - *out = make([]QueryParameterValue, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.RangeValue != nil { - in, out := &in.RangeValue, &out.RangeValue - *out = new(RangeValue) - (*in).DeepCopyInto(*out) - } - if in.AltStructValues != nil { - in, out := &in.AltStructValues, &out.AltStructValues - *out = make([]google_protobuf_Value, len(*in)) - copy(*out, *in) - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new QueryParameterValue. 
-func (in *QueryParameterValue) DeepCopy() *QueryParameterValue { - if in == nil { - return nil - } - out := new(QueryParameterValue) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *QueryTimelineSample) DeepCopyInto(out *QueryTimelineSample) { - *out = *in - if in.ElapsedMs != nil { - in, out := &in.ElapsedMs, &out.ElapsedMs - *out = new(int64) - **out = **in - } - if in.TotalSlotMs != nil { - in, out := &in.TotalSlotMs, &out.TotalSlotMs - *out = new(int64) - **out = **in - } - if in.PendingUnits != nil { - in, out := &in.PendingUnits, &out.PendingUnits - *out = new(int64) - **out = **in - } - if in.CompletedUnits != nil { - in, out := &in.CompletedUnits, &out.CompletedUnits - *out = new(int64) - **out = **in - } - if in.ActiveUnits != nil { - in, out := &in.ActiveUnits, &out.ActiveUnits - *out = new(int64) - **out = **in - } - if in.EstimatedRunnableUnits != nil { - in, out := &in.EstimatedRunnableUnits, &out.EstimatedRunnableUnits - *out = new(int64) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new QueryTimelineSample. -func (in *QueryTimelineSample) DeepCopy() *QueryTimelineSample { - if in == nil { - return nil - } - out := new(QueryTimelineSample) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *RangePartitioning) DeepCopyInto(out *RangePartitioning) { - *out = *in - if in.Field != nil { - in, out := &in.Field, &out.Field - *out = new(string) - **out = **in - } - if in.Range != nil { - in, out := &in.Range, &out.Range - *out = new(RangePartitioning_Range) - (*in).DeepCopyInto(*out) - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RangePartitioning. -func (in *RangePartitioning) DeepCopy() *RangePartitioning { - if in == nil { - return nil - } - out := new(RangePartitioning) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *RangePartitioning_Range) DeepCopyInto(out *RangePartitioning_Range) { - *out = *in - if in.Start != nil { - in, out := &in.Start, &out.Start - *out = new(string) - **out = **in - } - if in.End != nil { - in, out := &in.End, &out.End - *out = new(string) - **out = **in - } - if in.Interval != nil { - in, out := &in.Interval, &out.Interval - *out = new(string) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RangePartitioning_Range. -func (in *RangePartitioning_Range) DeepCopy() *RangePartitioning_Range { - if in == nil { - return nil - } - out := new(RangePartitioning_Range) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *RangeValue) DeepCopyInto(out *RangeValue) { - *out = *in - if in.Start != nil { - in, out := &in.Start, &out.Start - *out = new(QueryParameterValue) - (*in).DeepCopyInto(*out) - } - if in.End != nil { - in, out := &in.End, &out.End - *out = new(QueryParameterValue) - (*in).DeepCopyInto(*out) - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RangeValue. 
-func (in *RangeValue) DeepCopy() *RangeValue { - if in == nil { - return nil - } - out := new(RangeValue) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *RemoteModelInfo) DeepCopyInto(out *RemoteModelInfo) { - *out = *in - if in.Endpoint != nil { - in, out := &in.Endpoint, &out.Endpoint - *out = new(string) - **out = **in - } - if in.RemoteServiceType != nil { - in, out := &in.RemoteServiceType, &out.RemoteServiceType - *out = new(string) - **out = **in - } - if in.Connection != nil { - in, out := &in.Connection, &out.Connection - *out = new(string) - **out = **in - } - if in.MaxBatchingRows != nil { - in, out := &in.MaxBatchingRows, &out.MaxBatchingRows - *out = new(int64) - **out = **in - } - if in.RemoteModelVersion != nil { - in, out := &in.RemoteModelVersion, &out.RemoteModelVersion - *out = new(string) - **out = **in - } - if in.SpeechRecognizer != nil { - in, out := &in.SpeechRecognizer, &out.SpeechRecognizer - *out = new(string) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RemoteModelInfo. -func (in *RemoteModelInfo) DeepCopy() *RemoteModelInfo { - if in == nil { - return nil - } - out := new(RemoteModelInfo) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *RestrictionConfig) DeepCopyInto(out *RestrictionConfig) { - *out = *in - if in.Type != nil { - in, out := &in.Type, &out.Type - *out = new(string) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RestrictionConfig. -func (in *RestrictionConfig) DeepCopy() *RestrictionConfig { - if in == nil { - return nil - } - out := new(RestrictionConfig) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *Routine) DeepCopyInto(out *Routine) { - *out = *in - if in.Etag != nil { - in, out := &in.Etag, &out.Etag - *out = new(string) - **out = **in - } - if in.RoutineReference != nil { - in, out := &in.RoutineReference, &out.RoutineReference - *out = new(RoutineReference) - (*in).DeepCopyInto(*out) - } - if in.RoutineType != nil { - in, out := &in.RoutineType, &out.RoutineType - *out = new(string) - **out = **in - } - if in.CreationTime != nil { - in, out := &in.CreationTime, &out.CreationTime - *out = new(int64) - **out = **in - } - if in.LastModifiedTime != nil { - in, out := &in.LastModifiedTime, &out.LastModifiedTime - *out = new(int64) - **out = **in - } - if in.Language != nil { - in, out := &in.Language, &out.Language - *out = new(string) - **out = **in - } - if in.Arguments != nil { - in, out := &in.Arguments, &out.Arguments - *out = make([]Routine_Argument, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.ReturnType != nil { - in, out := &in.ReturnType, &out.ReturnType - *out = new(StandardSqlDataType) - (*in).DeepCopyInto(*out) - } - if in.ReturnTableType != nil { - in, out := &in.ReturnTableType, &out.ReturnTableType - *out = new(StandardSqlTableType) - (*in).DeepCopyInto(*out) - } - if in.ImportedLibraries != nil { - in, out := &in.ImportedLibraries, &out.ImportedLibraries - *out = make([]string, len(*in)) - copy(*out, *in) - } - if in.DefinitionBody != nil { - in, out := &in.DefinitionBody, &out.DefinitionBody - *out = new(string) - **out = **in - } - if in.Description != nil { - in, out := &in.Description, &out.Description - *out = new(string) - **out = **in - } - if in.DeterminismLevel != nil { - in, out := &in.DeterminismLevel, &out.DeterminismLevel - *out = new(string) - **out = **in - } - if in.SecurityMode != nil { - in, out := &in.SecurityMode, &out.SecurityMode - *out = new(string) - **out = **in - } - if in.StrictMode != nil { - in, out := &in.StrictMode, &out.StrictMode - *out = new(bool) - **out = **in - } - if in.RemoteFunctionOptions != nil { - in, out := &in.RemoteFunctionOptions, &out.RemoteFunctionOptions - *out = new(Routine_RemoteFunctionOptions) - (*in).DeepCopyInto(*out) - } - if in.SparkOptions != nil { - in, out := &in.SparkOptions, &out.SparkOptions - *out = new(SparkOptions) - (*in).DeepCopyInto(*out) - } - if in.DataGovernanceType != nil { - in, out := &in.DataGovernanceType, &out.DataGovernanceType - *out = new(string) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Routine. -func (in *Routine) DeepCopy() *Routine { - if in == nil { - return nil - } - out := new(Routine) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *RoutineReference) DeepCopyInto(out *RoutineReference) { - *out = *in - if in.ProjectId != nil { - in, out := &in.ProjectId, &out.ProjectId - *out = new(string) - **out = **in - } - if in.DatasetId != nil { - in, out := &in.DatasetId, &out.DatasetId - *out = new(string) - **out = **in - } - if in.RoutineId != nil { - in, out := &in.RoutineId, &out.RoutineId - *out = new(string) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RoutineReference. 
-func (in *RoutineReference) DeepCopy() *RoutineReference { - if in == nil { - return nil - } - out := new(RoutineReference) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Routine_Argument) DeepCopyInto(out *Routine_Argument) { - *out = *in - if in.Name != nil { - in, out := &in.Name, &out.Name - *out = new(string) - **out = **in - } - if in.ArgumentKind != nil { - in, out := &in.ArgumentKind, &out.ArgumentKind - *out = new(string) - **out = **in - } - if in.Mode != nil { - in, out := &in.Mode, &out.Mode - *out = new(string) - **out = **in - } - if in.DataType != nil { - in, out := &in.DataType, &out.DataType - *out = new(StandardSqlDataType) - (*in).DeepCopyInto(*out) - } - if in.IsAggregate != nil { - in, out := &in.IsAggregate, &out.IsAggregate - *out = new(bool) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Routine_Argument. -func (in *Routine_Argument) DeepCopy() *Routine_Argument { - if in == nil { - return nil - } - out := new(Routine_Argument) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Routine_RemoteFunctionOptions) DeepCopyInto(out *Routine_RemoteFunctionOptions) { - *out = *in - if in.Endpoint != nil { - in, out := &in.Endpoint, &out.Endpoint - *out = new(string) - **out = **in - } - if in.Connection != nil { - in, out := &in.Connection, &out.Connection - *out = new(string) - **out = **in - } - if in.UserDefinedContext != nil { - in, out := &in.UserDefinedContext, &out.UserDefinedContext - *out = make(map[string]string, len(*in)) - for key, val := range *in { - (*out)[key] = val - } - } - if in.MaxBatchingRows != nil { - in, out := &in.MaxBatchingRows, &out.MaxBatchingRows - *out = new(int64) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Routine_RemoteFunctionOptions. -func (in *Routine_RemoteFunctionOptions) DeepCopy() *Routine_RemoteFunctionOptions { - if in == nil { - return nil - } - out := new(Routine_RemoteFunctionOptions) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *RowAccessPolicy) DeepCopyInto(out *RowAccessPolicy) { - *out = *in - if in.Etag != nil { - in, out := &in.Etag, &out.Etag - *out = new(string) - **out = **in - } - if in.RowAccessPolicyReference != nil { - in, out := &in.RowAccessPolicyReference, &out.RowAccessPolicyReference - *out = new(RowAccessPolicyReference) - (*in).DeepCopyInto(*out) - } - if in.FilterPredicate != nil { - in, out := &in.FilterPredicate, &out.FilterPredicate - *out = new(string) - **out = **in - } - if in.CreationTime != nil { - in, out := &in.CreationTime, &out.CreationTime - *out = new(string) - **out = **in - } - if in.LastModifiedTime != nil { - in, out := &in.LastModifiedTime, &out.LastModifiedTime - *out = new(string) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RowAccessPolicy. -func (in *RowAccessPolicy) DeepCopy() *RowAccessPolicy { - if in == nil { - return nil - } - out := new(RowAccessPolicy) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *RowAccessPolicyReference) DeepCopyInto(out *RowAccessPolicyReference) { - *out = *in - if in.ProjectID != nil { - in, out := &in.ProjectID, &out.ProjectID - *out = new(string) - **out = **in - } - if in.DatasetID != nil { - in, out := &in.DatasetID, &out.DatasetID - *out = new(string) - **out = **in - } - if in.TableID != nil { - in, out := &in.TableID, &out.TableID - *out = new(string) - **out = **in - } - if in.PolicyID != nil { - in, out := &in.PolicyID, &out.PolicyID - *out = new(string) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RowAccessPolicyReference. -func (in *RowAccessPolicyReference) DeepCopy() *RowAccessPolicyReference { - if in == nil { - return nil - } - out := new(RowAccessPolicyReference) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *RowLevelSecurityStatistics) DeepCopyInto(out *RowLevelSecurityStatistics) { - *out = *in - if in.RowLevelSecurityApplied != nil { - in, out := &in.RowLevelSecurityApplied, &out.RowLevelSecurityApplied - *out = new(bool) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RowLevelSecurityStatistics. -func (in *RowLevelSecurityStatistics) DeepCopy() *RowLevelSecurityStatistics { - if in == nil { - return nil - } - out := new(RowLevelSecurityStatistics) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *ScriptOptions) DeepCopyInto(out *ScriptOptions) { - *out = *in - if in.StatementTimeoutMs != nil { - in, out := &in.StatementTimeoutMs, &out.StatementTimeoutMs - *out = new(int64) - **out = **in - } - if in.StatementByteBudget != nil { - in, out := &in.StatementByteBudget, &out.StatementByteBudget - *out = new(int64) - **out = **in - } - if in.KeyResultStatement != nil { - in, out := &in.KeyResultStatement, &out.KeyResultStatement - *out = new(string) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ScriptOptions. -func (in *ScriptOptions) DeepCopy() *ScriptOptions { - if in == nil { - return nil - } - out := new(ScriptOptions) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *ScriptStatistics) DeepCopyInto(out *ScriptStatistics) { - *out = *in - if in.EvaluationKind != nil { - in, out := &in.EvaluationKind, &out.EvaluationKind - *out = new(string) - **out = **in - } - if in.StackFrames != nil { - in, out := &in.StackFrames, &out.StackFrames - *out = make([]ScriptStatistics_ScriptStackFrame, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ScriptStatistics. -func (in *ScriptStatistics) DeepCopy() *ScriptStatistics { - if in == nil { - return nil - } - out := new(ScriptStatistics) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *ScriptStatistics_ScriptStackFrame) DeepCopyInto(out *ScriptStatistics_ScriptStackFrame) { - *out = *in - if in.StartLine != nil { - in, out := &in.StartLine, &out.StartLine - *out = new(int32) - **out = **in - } - if in.StartColumn != nil { - in, out := &in.StartColumn, &out.StartColumn - *out = new(int32) - **out = **in - } - if in.EndLine != nil { - in, out := &in.EndLine, &out.EndLine - *out = new(int32) - **out = **in - } - if in.EndColumn != nil { - in, out := &in.EndColumn, &out.EndColumn - *out = new(int32) - **out = **in - } - if in.ProcedureID != nil { - in, out := &in.ProcedureID, &out.ProcedureID - *out = new(string) - **out = **in - } - if in.Text != nil { - in, out := &in.Text, &out.Text - *out = new(string) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ScriptStatistics_ScriptStackFrame. -func (in *ScriptStatistics_ScriptStackFrame) DeepCopy() *ScriptStatistics_ScriptStackFrame { - if in == nil { - return nil - } - out := new(ScriptStatistics_ScriptStackFrame) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *SearchStatistics) DeepCopyInto(out *SearchStatistics) { - *out = *in - if in.IndexUsageMode != nil { - in, out := &in.IndexUsageMode, &out.IndexUsageMode - *out = new(string) - **out = **in - } - if in.IndexUnusedReasons != nil { - in, out := &in.IndexUnusedReasons, &out.IndexUnusedReasons - *out = make([]IndexUnusedReason, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SearchStatistics. -func (in *SearchStatistics) DeepCopy() *SearchStatistics { - if in == nil { - return nil - } - out := new(SearchStatistics) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *SerDeInfo) DeepCopyInto(out *SerDeInfo) { - *out = *in - if in.Name != nil { - in, out := &in.Name, &out.Name - *out = new(string) - **out = **in - } - if in.SerializationLibrary != nil { - in, out := &in.SerializationLibrary, &out.SerializationLibrary - *out = new(string) - **out = **in - } - if in.Parameters != nil { - in, out := &in.Parameters, &out.Parameters - *out = make(map[string]string, len(*in)) - for key, val := range *in { - (*out)[key] = val - } - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SerDeInfo. -func (in *SerDeInfo) DeepCopy() *SerDeInfo { - if in == nil { - return nil - } - out := new(SerDeInfo) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *SessionInfo) DeepCopyInto(out *SessionInfo) { - *out = *in - if in.SessionID != nil { - in, out := &in.SessionID, &out.SessionID - *out = new(string) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SessionInfo. -func (in *SessionInfo) DeepCopy() *SessionInfo { - if in == nil { - return nil - } - out := new(SessionInfo) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *SnapshotDefinition) DeepCopyInto(out *SnapshotDefinition) { - *out = *in - if in.BaseTableReference != nil { - in, out := &in.BaseTableReference, &out.BaseTableReference - *out = new(TableReference) - (*in).DeepCopyInto(*out) - } - if in.SnapshotTime != nil { - in, out := &in.SnapshotTime, &out.SnapshotTime - *out = new(string) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SnapshotDefinition. -func (in *SnapshotDefinition) DeepCopy() *SnapshotDefinition { - if in == nil { - return nil - } - out := new(SnapshotDefinition) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *SparkOptions) DeepCopyInto(out *SparkOptions) { - *out = *in - if in.Connection != nil { - in, out := &in.Connection, &out.Connection - *out = new(string) - **out = **in - } - if in.RuntimeVersion != nil { - in, out := &in.RuntimeVersion, &out.RuntimeVersion - *out = new(string) - **out = **in - } - if in.ContainerImage != nil { - in, out := &in.ContainerImage, &out.ContainerImage - *out = new(string) - **out = **in - } - if in.Properties != nil { - in, out := &in.Properties, &out.Properties - *out = make(map[string]string, len(*in)) - for key, val := range *in { - (*out)[key] = val - } - } - if in.MainFileUri != nil { - in, out := &in.MainFileUri, &out.MainFileUri - *out = new(string) - **out = **in - } - if in.PyFileUris != nil { - in, out := &in.PyFileUris, &out.PyFileUris - *out = make([]string, len(*in)) - copy(*out, *in) - } - if in.JarUris != nil { - in, out := &in.JarUris, &out.JarUris - *out = make([]string, len(*in)) - copy(*out, *in) - } - if in.FileUris != nil { - in, out := &in.FileUris, &out.FileUris - *out = make([]string, len(*in)) - copy(*out, *in) - } - if in.ArchiveUris != nil { - in, out := &in.ArchiveUris, &out.ArchiveUris - *out = make([]string, len(*in)) - copy(*out, *in) - } - if in.MainClass != nil { - in, out := &in.MainClass, &out.MainClass - *out = new(string) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SparkOptions. -func (in *SparkOptions) DeepCopy() *SparkOptions { - if in == nil { - return nil - } - out := new(SparkOptions) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *SparkStatistics) DeepCopyInto(out *SparkStatistics) { - *out = *in - if in.SparkJobID != nil { - in, out := &in.SparkJobID, &out.SparkJobID - *out = new(string) - **out = **in - } - if in.SparkJobLocation != nil { - in, out := &in.SparkJobLocation, &out.SparkJobLocation - *out = new(string) - **out = **in - } - if in.Endpoints != nil { - in, out := &in.Endpoints, &out.Endpoints - *out = make(map[string]string, len(*in)) - for key, val := range *in { - (*out)[key] = val - } - } - if in.LoggingInfo != nil { - in, out := &in.LoggingInfo, &out.LoggingInfo - *out = new(SparkStatistics_LoggingInfo) - (*in).DeepCopyInto(*out) - } - if in.KmsKeyName != nil { - in, out := &in.KmsKeyName, &out.KmsKeyName - *out = new(string) - **out = **in - } - if in.GcsStagingBucket != nil { - in, out := &in.GcsStagingBucket, &out.GcsStagingBucket - *out = new(string) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SparkStatistics. 
-func (in *SparkStatistics) DeepCopy() *SparkStatistics { - if in == nil { - return nil - } - out := new(SparkStatistics) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *SparkStatistics_LoggingInfo) DeepCopyInto(out *SparkStatistics_LoggingInfo) { - *out = *in - if in.ResourceType != nil { - in, out := &in.ResourceType, &out.ResourceType - *out = new(string) - **out = **in - } - if in.ProjectID != nil { - in, out := &in.ProjectID, &out.ProjectID - *out = new(string) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SparkStatistics_LoggingInfo. -func (in *SparkStatistics_LoggingInfo) DeepCopy() *SparkStatistics_LoggingInfo { - if in == nil { - return nil - } - out := new(SparkStatistics_LoggingInfo) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *StagePerformanceChangeInsight) DeepCopyInto(out *StagePerformanceChangeInsight) { - *out = *in - if in.StageID != nil { - in, out := &in.StageID, &out.StageID - *out = new(int64) - **out = **in - } - if in.InputDataChange != nil { - in, out := &in.InputDataChange, &out.InputDataChange - *out = new(InputDataChange) - (*in).DeepCopyInto(*out) - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new StagePerformanceChangeInsight. -func (in *StagePerformanceChangeInsight) DeepCopy() *StagePerformanceChangeInsight { - if in == nil { - return nil - } - out := new(StagePerformanceChangeInsight) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *StagePerformanceStandaloneInsight) DeepCopyInto(out *StagePerformanceStandaloneInsight) { - *out = *in - if in.StageID != nil { - in, out := &in.StageID, &out.StageID - *out = new(int64) - **out = **in - } - if in.SlotContention != nil { - in, out := &in.SlotContention, &out.SlotContention - *out = new(bool) - **out = **in - } - if in.InsufficientShuffleQuota != nil { - in, out := &in.InsufficientShuffleQuota, &out.InsufficientShuffleQuota - *out = new(bool) - **out = **in - } - if in.BiEngineReasons != nil { - in, out := &in.BiEngineReasons, &out.BiEngineReasons - *out = make([]BiEngineReason, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.HighCardinalityJoins != nil { - in, out := &in.HighCardinalityJoins, &out.HighCardinalityJoins - *out = make([]HighCardinalityJoin, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.PartitionSkew != nil { - in, out := &in.PartitionSkew, &out.PartitionSkew - *out = new(PartitionSkew) - (*in).DeepCopyInto(*out) - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new StagePerformanceStandaloneInsight. -func (in *StagePerformanceStandaloneInsight) DeepCopy() *StagePerformanceStandaloneInsight { - if in == nil { - return nil - } - out := new(StagePerformanceStandaloneInsight) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *StandardSqlDataType) DeepCopyInto(out *StandardSqlDataType) { - *out = *in - if in.TypeKind != nil { - in, out := &in.TypeKind, &out.TypeKind - *out = new(string) - **out = **in - } - if in.ArrayElementType != nil { - in, out := &in.ArrayElementType, &out.ArrayElementType - *out = new(StandardSqlDataType) - (*in).DeepCopyInto(*out) - } - if in.StructType != nil { - in, out := &in.StructType, &out.StructType - *out = new(StandardSqlStructType) - (*in).DeepCopyInto(*out) - } - if in.RangeElementType != nil { - in, out := &in.RangeElementType, &out.RangeElementType - *out = new(StandardSqlDataType) - (*in).DeepCopyInto(*out) - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new StandardSqlDataType. -func (in *StandardSqlDataType) DeepCopy() *StandardSqlDataType { - if in == nil { - return nil - } - out := new(StandardSqlDataType) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *StandardSqlField) DeepCopyInto(out *StandardSqlField) { - *out = *in - if in.Name != nil { - in, out := &in.Name, &out.Name - *out = new(string) - **out = **in - } - if in.Type != nil { - in, out := &in.Type, &out.Type - *out = new(StandardSqlDataType) - (*in).DeepCopyInto(*out) - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new StandardSqlField. -func (in *StandardSqlField) DeepCopy() *StandardSqlField { - if in == nil { - return nil - } - out := new(StandardSqlField) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *StandardSqlStructType) DeepCopyInto(out *StandardSqlStructType) { - *out = *in - if in.Fields != nil { - in, out := &in.Fields, &out.Fields - *out = make([]StandardSqlField, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new StandardSqlStructType. -func (in *StandardSqlStructType) DeepCopy() *StandardSqlStructType { - if in == nil { - return nil - } - out := new(StandardSqlStructType) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *StandardSqlTableType) DeepCopyInto(out *StandardSqlTableType) { - *out = *in - if in.Columns != nil { - in, out := &in.Columns, &out.Columns - *out = make([]StandardSqlField, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new StandardSqlTableType. -func (in *StandardSqlTableType) DeepCopy() *StandardSqlTableType { - if in == nil { - return nil - } - out := new(StandardSqlTableType) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *StorageDescriptor) DeepCopyInto(out *StorageDescriptor) { - *out = *in - if in.LocationUri != nil { - in, out := &in.LocationUri, &out.LocationUri - *out = new(string) - **out = **in - } - if in.InputFormat != nil { - in, out := &in.InputFormat, &out.InputFormat - *out = new(string) - **out = **in - } - if in.OutputFormat != nil { - in, out := &in.OutputFormat, &out.OutputFormat - *out = new(string) - **out = **in - } - if in.SerdeInfo != nil { - in, out := &in.SerdeInfo, &out.SerdeInfo - *out = new(SerDeInfo) - (*in).DeepCopyInto(*out) - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new StorageDescriptor. -func (in *StorageDescriptor) DeepCopy() *StorageDescriptor { - if in == nil { - return nil - } - out := new(StorageDescriptor) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *Streamingbuffer) DeepCopyInto(out *Streamingbuffer) { - *out = *in - if in.EstimatedBytes != nil { - in, out := &in.EstimatedBytes, &out.EstimatedBytes - *out = new(uint64) - **out = **in - } - if in.EstimatedRows != nil { - in, out := &in.EstimatedRows, &out.EstimatedRows - *out = new(uint64) - **out = **in - } - if in.OldestEntryTime != nil { - in, out := &in.OldestEntryTime, &out.OldestEntryTime - *out = new(uint64) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Streamingbuffer. -func (in *Streamingbuffer) DeepCopy() *Streamingbuffer { - if in == nil { - return nil - } - out := new(Streamingbuffer) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *SystemVariables) DeepCopyInto(out *SystemVariables) { - *out = *in - if in.Values != nil { - in, out := &in.Values, &out.Values - *out = new(google_protobuf_Struct) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SystemVariables. -func (in *SystemVariables) DeepCopy() *SystemVariables { - if in == nil { - return nil - } - out := new(SystemVariables) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *Table) DeepCopyInto(out *Table) { - *out = *in - if in.Kind != nil { - in, out := &in.Kind, &out.Kind - *out = new(string) - **out = **in - } - if in.Etag != nil { - in, out := &in.Etag, &out.Etag - *out = new(string) - **out = **in - } - if in.ID != nil { - in, out := &in.ID, &out.ID - *out = new(string) - **out = **in - } - if in.SelfLink != nil { - in, out := &in.SelfLink, &out.SelfLink - *out = new(string) - **out = **in - } - if in.TableReference != nil { - in, out := &in.TableReference, &out.TableReference - *out = new(TableReference) - (*in).DeepCopyInto(*out) - } - if in.FriendlyName != nil { - in, out := &in.FriendlyName, &out.FriendlyName - *out = new(string) - **out = **in - } - if in.Description != nil { - in, out := &in.Description, &out.Description - *out = new(string) - **out = **in - } - if in.Labels != nil { - in, out := &in.Labels, &out.Labels - *out = make(map[string]string, len(*in)) - for key, val := range *in { - (*out)[key] = val - } - } - if in.Schema != nil { - in, out := &in.Schema, &out.Schema - *out = new(TableSchema) - (*in).DeepCopyInto(*out) - } - if in.TimePartitioning != nil { - in, out := &in.TimePartitioning, &out.TimePartitioning - *out = new(TimePartitioning) - (*in).DeepCopyInto(*out) - } - if in.RangePartitioning != nil { - in, out := &in.RangePartitioning, &out.RangePartitioning - *out = new(RangePartitioning) - (*in).DeepCopyInto(*out) - } - if in.Clustering != nil { - in, out := &in.Clustering, &out.Clustering - *out = new(Clustering) - (*in).DeepCopyInto(*out) - } - if in.RequirePartitionFilter != nil { - in, out := &in.RequirePartitionFilter, &out.RequirePartitionFilter - *out = new(bool) - **out = **in - } - if in.PartitionDefinition != nil { - in, out := &in.PartitionDefinition, &out.PartitionDefinition - *out = new(PartitioningDefinition) - (*in).DeepCopyInto(*out) - } - if in.NumBytes != nil { - in, out := &in.NumBytes, &out.NumBytes - *out = new(int64) - **out = **in - } - if in.NumPhysicalBytes != nil { - in, out := &in.NumPhysicalBytes, &out.NumPhysicalBytes - *out = new(int64) - **out = **in - } - if in.NumLongTermBytes != nil { - in, out := &in.NumLongTermBytes, &out.NumLongTermBytes - *out = new(int64) - **out = **in - } - if in.NumRows != nil { - in, out := &in.NumRows, &out.NumRows - *out = new(uint64) - **out = **in - } - if in.CreationTime != nil { - in, out := &in.CreationTime, &out.CreationTime - *out = new(int64) - **out = **in - } - if in.ExpirationTime != nil { - in, out := &in.ExpirationTime, &out.ExpirationTime - *out = new(int64) - **out = **in - } - if in.LastModifiedTime != nil { - in, out := &in.LastModifiedTime, &out.LastModifiedTime - *out = new(uint64) - **out = **in - } - if in.Type != nil { - in, out := &in.Type, &out.Type - *out = new(string) - **out = **in - } - if in.View != nil { - in, out := &in.View, &out.View - *out = new(ViewDefinition) - (*in).DeepCopyInto(*out) - } - if in.MaterializedView != nil { - in, out := &in.MaterializedView, &out.MaterializedView - *out = new(MaterializedViewDefinition) - (*in).DeepCopyInto(*out) - } - if in.MaterializedViewStatus != nil { - in, out := &in.MaterializedViewStatus, &out.MaterializedViewStatus - *out = new(MaterializedViewStatus) - (*in).DeepCopyInto(*out) - } - if in.ExternalDataConfiguration != nil { - in, out := &in.ExternalDataConfiguration, &out.ExternalDataConfiguration - *out = new(ExternalDataConfiguration) - (*in).DeepCopyInto(*out) - } - if in.BiglakeConfiguration != nil { - in, out := &in.BiglakeConfiguration, &out.BiglakeConfiguration - *out = 
new(BigLakeConfiguration) - (*in).DeepCopyInto(*out) - } - if in.Location != nil { - in, out := &in.Location, &out.Location - *out = new(string) - **out = **in - } - if in.StreamingBuffer != nil { - in, out := &in.StreamingBuffer, &out.StreamingBuffer - *out = new(Streamingbuffer) - (*in).DeepCopyInto(*out) - } - if in.EncryptionConfiguration != nil { - in, out := &in.EncryptionConfiguration, &out.EncryptionConfiguration - *out = new(EncryptionConfiguration) - (*in).DeepCopyInto(*out) - } - if in.SnapshotDefinition != nil { - in, out := &in.SnapshotDefinition, &out.SnapshotDefinition - *out = new(SnapshotDefinition) - (*in).DeepCopyInto(*out) - } - if in.DefaultCollation != nil { - in, out := &in.DefaultCollation, &out.DefaultCollation - *out = new(string) - **out = **in - } - if in.DefaultRoundingMode != nil { - in, out := &in.DefaultRoundingMode, &out.DefaultRoundingMode - *out = new(string) - **out = **in - } - if in.CloneDefinition != nil { - in, out := &in.CloneDefinition, &out.CloneDefinition - *out = new(CloneDefinition) - (*in).DeepCopyInto(*out) - } - if in.NumTimeTravelPhysicalBytes != nil { - in, out := &in.NumTimeTravelPhysicalBytes, &out.NumTimeTravelPhysicalBytes - *out = new(int64) - **out = **in - } - if in.NumTotalLogicalBytes != nil { - in, out := &in.NumTotalLogicalBytes, &out.NumTotalLogicalBytes - *out = new(int64) - **out = **in - } - if in.NumActiveLogicalBytes != nil { - in, out := &in.NumActiveLogicalBytes, &out.NumActiveLogicalBytes - *out = new(int64) - **out = **in - } - if in.NumLongTermLogicalBytes != nil { - in, out := &in.NumLongTermLogicalBytes, &out.NumLongTermLogicalBytes - *out = new(int64) - **out = **in - } - if in.NumCurrentPhysicalBytes != nil { - in, out := &in.NumCurrentPhysicalBytes, &out.NumCurrentPhysicalBytes - *out = new(int64) - **out = **in - } - if in.NumTotalPhysicalBytes != nil { - in, out := &in.NumTotalPhysicalBytes, &out.NumTotalPhysicalBytes - *out = new(int64) - **out = **in - } - if in.NumActivePhysicalBytes != nil { - in, out := &in.NumActivePhysicalBytes, &out.NumActivePhysicalBytes - *out = new(int64) - **out = **in - } - if in.NumLongTermPhysicalBytes != nil { - in, out := &in.NumLongTermPhysicalBytes, &out.NumLongTermPhysicalBytes - *out = new(int64) - **out = **in - } - if in.NumPartitions != nil { - in, out := &in.NumPartitions, &out.NumPartitions - *out = new(int64) - **out = **in - } - if in.MaxStaleness != nil { - in, out := &in.MaxStaleness, &out.MaxStaleness - *out = new(string) - **out = **in - } - if in.Restrictions != nil { - in, out := &in.Restrictions, &out.Restrictions - *out = new(RestrictionConfig) - (*in).DeepCopyInto(*out) - } - if in.TableConstraints != nil { - in, out := &in.TableConstraints, &out.TableConstraints - *out = new(TableConstraints) - (*in).DeepCopyInto(*out) - } - if in.ResourceTags != nil { - in, out := &in.ResourceTags, &out.ResourceTags - *out = make(map[string]string, len(*in)) - for key, val := range *in { - (*out)[key] = val - } - } - if in.TableReplicationInfo != nil { - in, out := &in.TableReplicationInfo, &out.TableReplicationInfo - *out = new(TableReplicationInfo) - (*in).DeepCopyInto(*out) - } - if in.Replicas != nil { - in, out := &in.Replicas, &out.Replicas - *out = make([]TableReference, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.ExternalCatalogTableOptions != nil { - in, out := &in.ExternalCatalogTableOptions, &out.ExternalCatalogTableOptions - *out = new(ExternalCatalogTableOptions) - (*in).DeepCopyInto(*out) - } -} - -// DeepCopy is an 
autogenerated deepcopy function, copying the receiver, creating a new Table. -func (in *Table) DeepCopy() *Table { - if in == nil { - return nil - } - out := new(Table) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *TableConstraints) DeepCopyInto(out *TableConstraints) { - *out = *in - if in.PrimaryKey != nil { - in, out := &in.PrimaryKey, &out.PrimaryKey - *out = new(PrimaryKey) - (*in).DeepCopyInto(*out) - } - if in.ForeignKeys != nil { - in, out := &in.ForeignKeys, &out.ForeignKeys - *out = make([]ForeignKey, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TableConstraints. -func (in *TableConstraints) DeepCopy() *TableConstraints { - if in == nil { - return nil - } - out := new(TableConstraints) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *TableFieldSchema) DeepCopyInto(out *TableFieldSchema) { - *out = *in - if in.Name != nil { - in, out := &in.Name, &out.Name - *out = new(string) - **out = **in - } - if in.Type != nil { - in, out := &in.Type, &out.Type - *out = new(string) - **out = **in - } - if in.Mode != nil { - in, out := &in.Mode, &out.Mode - *out = new(string) - **out = **in - } - if in.Fields != nil { - in, out := &in.Fields, &out.Fields - *out = make([]TableFieldSchema, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.Description != nil { - in, out := &in.Description, &out.Description - *out = new(string) - **out = **in - } - if in.PolicyTags != nil { - in, out := &in.PolicyTags, &out.PolicyTags - *out = new(TableFieldSchema_PolicyTagList) - (*in).DeepCopyInto(*out) - } - if in.DataPolicies != nil { - in, out := &in.DataPolicies, &out.DataPolicies - *out = make([]DataPolicyOption, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.MaxLength != nil { - in, out := &in.MaxLength, &out.MaxLength - *out = new(int64) - **out = **in - } - if in.Precision != nil { - in, out := &in.Precision, &out.Precision - *out = new(int64) - **out = **in - } - if in.Scale != nil { - in, out := &in.Scale, &out.Scale - *out = new(int64) - **out = **in - } - if in.RoundingMode != nil { - in, out := &in.RoundingMode, &out.RoundingMode - *out = new(string) - **out = **in - } - if in.Collation != nil { - in, out := &in.Collation, &out.Collation - *out = new(string) - **out = **in - } - if in.DefaultValueExpression != nil { - in, out := &in.DefaultValueExpression, &out.DefaultValueExpression - *out = new(string) - **out = **in - } - if in.RangeElementType != nil { - in, out := &in.RangeElementType, &out.RangeElementType - *out = new(TableFieldSchema_FieldElementType) - (*in).DeepCopyInto(*out) - } - if in.ForeignTypeDefinition != nil { - in, out := &in.ForeignTypeDefinition, &out.ForeignTypeDefinition - *out = new(string) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TableFieldSchema. -func (in *TableFieldSchema) DeepCopy() *TableFieldSchema { - if in == nil { - return nil - } - out := new(TableFieldSchema) + out := new(LinkedDatasetSource) in.DeepCopyInto(out) return out } // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *TableFieldSchema_FieldElementType) DeepCopyInto(out *TableFieldSchema_FieldElementType) { +func (in *RestrictionConfig) DeepCopyInto(out *RestrictionConfig) { *out = *in if in.Type != nil { in, out := &in.Type, &out.Type @@ -7663,114 +537,42 @@ func (in *TableFieldSchema_FieldElementType) DeepCopyInto(out *TableFieldSchema_ } } -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TableFieldSchema_FieldElementType. -func (in *TableFieldSchema_FieldElementType) DeepCopy() *TableFieldSchema_FieldElementType { - if in == nil { - return nil - } - out := new(TableFieldSchema_FieldElementType) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *TableFieldSchema_PolicyTagList) DeepCopyInto(out *TableFieldSchema_PolicyTagList) { - *out = *in - if in.Names != nil { - in, out := &in.Names, &out.Names - *out = make([]string, len(*in)) - copy(*out, *in) - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TableFieldSchema_PolicyTagList. -func (in *TableFieldSchema_PolicyTagList) DeepCopy() *TableFieldSchema_PolicyTagList { - if in == nil { - return nil - } - out := new(TableFieldSchema_PolicyTagList) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *TableList) DeepCopyInto(out *TableList) { - *out = *in - if in.Kind != nil { - in, out := &in.Kind, &out.Kind - *out = new(string) - **out = **in - } - if in.Etag != nil { - in, out := &in.Etag, &out.Etag - *out = new(string) - **out = **in - } - if in.NextPageToken != nil { - in, out := &in.NextPageToken, &out.NextPageToken - *out = new(string) - **out = **in - } - if in.Tables != nil { - in, out := &in.Tables, &out.Tables - *out = make([]ListFormatTable, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.TotalItems != nil { - in, out := &in.TotalItems, &out.TotalItems - *out = new(int32) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TableList. -func (in *TableList) DeepCopy() *TableList { +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RestrictionConfig. +func (in *RestrictionConfig) DeepCopy() *RestrictionConfig { if in == nil { return nil } - out := new(TableList) + out := new(RestrictionConfig) in.DeepCopyInto(out) return out } // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *TableMetadataCacheUsage) DeepCopyInto(out *TableMetadataCacheUsage) { +func (in *RoutineReference) DeepCopyInto(out *RoutineReference) { *out = *in - if in.TableReference != nil { - in, out := &in.TableReference, &out.TableReference - *out = new(TableReference) - (*in).DeepCopyInto(*out) - } - if in.UnusedReason != nil { - in, out := &in.UnusedReason, &out.UnusedReason - *out = new(string) - **out = **in - } - if in.Explanation != nil { - in, out := &in.Explanation, &out.Explanation + if in.ProjectId != nil { + in, out := &in.ProjectId, &out.ProjectId *out = new(string) **out = **in } - if in.Staleness != nil { - in, out := &in.Staleness, &out.Staleness + if in.DatasetId != nil { + in, out := &in.DatasetId, &out.DatasetId *out = new(string) **out = **in } - if in.TableType != nil { - in, out := &in.TableType, &out.TableType + if in.RoutineId != nil { + in, out := &in.RoutineId, &out.RoutineId *out = new(string) **out = **in } } -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TableMetadataCacheUsage. -func (in *TableMetadataCacheUsage) DeepCopy() *TableMetadataCacheUsage { +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RoutineReference. +func (in *RoutineReference) DeepCopy() *RoutineReference { if in == nil { return nil } - out := new(TableMetadataCacheUsage) + out := new(RoutineReference) in.DeepCopyInto(out) return out } @@ -7804,231 +606,3 @@ func (in *TableReference) DeepCopy() *TableReference { in.DeepCopyInto(out) return out } - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *TableReplicationInfo) DeepCopyInto(out *TableReplicationInfo) { - *out = *in - if in.SourceTable != nil { - in, out := &in.SourceTable, &out.SourceTable - *out = new(TableReference) - (*in).DeepCopyInto(*out) - } - if in.ReplicationIntervalMs != nil { - in, out := &in.ReplicationIntervalMs, &out.ReplicationIntervalMs - *out = new(int64) - **out = **in - } - if in.ReplicatedSourceLastRefreshTime != nil { - in, out := &in.ReplicatedSourceLastRefreshTime, &out.ReplicatedSourceLastRefreshTime - *out = new(int64) - **out = **in - } - if in.ReplicationStatus != nil { - in, out := &in.ReplicationStatus, &out.ReplicationStatus - *out = new(string) - **out = **in - } - if in.ReplicationError != nil { - in, out := &in.ReplicationError, &out.ReplicationError - *out = new(ErrorProto) - (*in).DeepCopyInto(*out) - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TableReplicationInfo. -func (in *TableReplicationInfo) DeepCopy() *TableReplicationInfo { - if in == nil { - return nil - } - out := new(TableReplicationInfo) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *TableSchema) DeepCopyInto(out *TableSchema) { - *out = *in - if in.Fields != nil { - in, out := &in.Fields, &out.Fields - *out = make([]TableFieldSchema, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.ForeignTypeInfo != nil { - in, out := &in.ForeignTypeInfo, &out.ForeignTypeInfo - *out = new(ForeignTypeInfo) - (*in).DeepCopyInto(*out) - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TableSchema. 
-func (in *TableSchema) DeepCopy() *TableSchema { - if in == nil { - return nil - } - out := new(TableSchema) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *TimePartitioning) DeepCopyInto(out *TimePartitioning) { - *out = *in - if in.Type != nil { - in, out := &in.Type, &out.Type - *out = new(string) - **out = **in - } - if in.ExpirationMs != nil { - in, out := &in.ExpirationMs, &out.ExpirationMs - *out = new(int64) - **out = **in - } - if in.Field != nil { - in, out := &in.Field, &out.Field - *out = new(string) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TimePartitioning. -func (in *TimePartitioning) DeepCopy() *TimePartitioning { - if in == nil { - return nil - } - out := new(TimePartitioning) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *TransformColumn) DeepCopyInto(out *TransformColumn) { - *out = *in - if in.Name != nil { - in, out := &in.Name, &out.Name - *out = new(string) - **out = **in - } - if in.Type != nil { - in, out := &in.Type, &out.Type - *out = new(StandardSqlDataType) - (*in).DeepCopyInto(*out) - } - if in.TransformSql != nil { - in, out := &in.TransformSql, &out.TransformSql - *out = new(string) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TransformColumn. -func (in *TransformColumn) DeepCopy() *TransformColumn { - if in == nil { - return nil - } - out := new(TransformColumn) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *UserDefinedFunctionResource) DeepCopyInto(out *UserDefinedFunctionResource) { - *out = *in - if in.ResourceUri != nil { - in, out := &in.ResourceUri, &out.ResourceUri - *out = new(string) - **out = **in - } - if in.InlineCode != nil { - in, out := &in.InlineCode, &out.InlineCode - *out = new(string) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new UserDefinedFunctionResource. -func (in *UserDefinedFunctionResource) DeepCopy() *UserDefinedFunctionResource { - if in == nil { - return nil - } - out := new(UserDefinedFunctionResource) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *VectorSearchStatistics) DeepCopyInto(out *VectorSearchStatistics) { - *out = *in - if in.IndexUsageMode != nil { - in, out := &in.IndexUsageMode, &out.IndexUsageMode - *out = new(string) - **out = **in - } - if in.IndexUnusedReasons != nil { - in, out := &in.IndexUnusedReasons, &out.IndexUnusedReasons - *out = make([]IndexUnusedReason, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new VectorSearchStatistics. -func (in *VectorSearchStatistics) DeepCopy() *VectorSearchStatistics { - if in == nil { - return nil - } - out := new(VectorSearchStatistics) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
-func (in *ViewDefinition) DeepCopyInto(out *ViewDefinition) { - *out = *in - if in.Query != nil { - in, out := &in.Query, &out.Query - *out = new(string) - **out = **in - } - if in.UserDefinedFunctionResources != nil { - in, out := &in.UserDefinedFunctionResources, &out.UserDefinedFunctionResources - *out = make([]UserDefinedFunctionResource, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } - if in.UseLegacySql != nil { - in, out := &in.UseLegacySql, &out.UseLegacySql - *out = new(bool) - **out = **in - } - if in.UseExplicitColumnNames != nil { - in, out := &in.UseExplicitColumnNames, &out.UseExplicitColumnNames - *out = new(bool) - **out = **in - } - if in.PrivacyPolicy != nil { - in, out := &in.PrivacyPolicy, &out.PrivacyPolicy - *out = new(PrivacyPolicy) - (*in).DeepCopyInto(*out) - } - if in.ForeignDefinitions != nil { - in, out := &in.ForeignDefinitions, &out.ForeignDefinitions - *out = make([]ForeignViewDefinition, len(*in)) - for i := range *in { - (*in)[i].DeepCopyInto(&(*out)[i]) - } - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ViewDefinition. -func (in *ViewDefinition) DeepCopy() *ViewDefinition { - if in == nil { - return nil - } - out := new(ViewDefinition) - in.DeepCopyInto(out) - return out -} diff --git a/config/crds/resources/apiextensions.k8s.io_v1_customresourcedefinition_bigquerydatasets.bigquery.cnrm.cloud.google.com.yaml b/config/crds/resources/apiextensions.k8s.io_v1_customresourcedefinition_bigquerydatasets.bigquery.cnrm.cloud.google.com.yaml index d6c6320192..ba6d1e1e76 100644 --- a/config/crds/resources/apiextensions.k8s.io_v1_customresourcedefinition_bigquerydatasets.bigquery.cnrm.cloud.google.com.yaml +++ b/config/crds/resources/apiextensions.k8s.io_v1_customresourcedefinition_bigquerydatasets.bigquery.cnrm.cloud.google.com.yaml @@ -77,14 +77,13 @@ spec: description: The dataset this entry applies to. properties: datasetId: - description: Required. A unique ID for this dataset, - without the project name. The ID must contain only - letters (a-z, A-Z), numbers (0-9), or underscores - (_). The maximum length is 1,024 characters. + description: A unique Id for this dataset, without the + project name. The Id must contain only letters (a-z, + A-Z), numbers (0-9), or underscores (_). The maximum + length is 1,024 characters. type: string projectId: - description: Required. The ID of the project containing - this dataset. + description: The ID of the project containing this dataset. type: string required: - datasetId @@ -140,16 +139,14 @@ spec: an update operation.' properties: datasetId: - description: Required. The ID of the dataset containing - this routine. + description: The ID of the dataset containing this routine. type: string projectId: - description: Required. The ID of the project containing - this routine. + description: The ID of the project containing this routine. type: string routineId: - description: Required. The ID of the routine. The ID must - contain only letters (a-z, A-Z), numbers (0-9), or underscores + description: The Id of the routine. The Id must contain + only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 256 characters. type: string required: @@ -182,20 +179,18 @@ spec: granted again via an update operation.' properties: datasetId: - description: Required. The ID of the dataset containing - this table. + description: The ID of the dataset containing this table. type: string projectId: - description: Required. 
The ID of the project containing - this table. + description: The ID of the project containing this table. type: string tableId: - description: Required. The ID of the table. The ID can contain - Unicode characters in category L (letter), M (mark), N - (number), Pc (connector, including underscore), Pd (dash), - and Zs (space). For more information, see [General Category](https://wikipedia.org/wiki/Unicode_character_property#General_Category). + description: The Id of the table. The Id can contain Unicode + characters in category L (letter), M (mark), N (number), + Pc (connector, including underscore), Pd (dash), and Zs + (space). For more information, see [General Category](https://wikipedia.org/wiki/Unicode_character_property#General_Category). The maximum length is 1,024 characters. Certain operations - allow suffixing of the table ID with a partition decorator, + allow suffixing of the table Id with a partition decorator, such as `sample_table$20190123`. type: string required: @@ -309,7 +304,7 @@ spec: is 168 hours if this is not set. type: string projectRef: - description: The project that this resource belongs to. optional. + description: ' Optional. The project that this resource belongs to.' oneOf: - not: required: @@ -347,6 +342,8 @@ spec: storageBillingModel: description: Optional. Updates storage_billing_model for the dataset. type: string + required: + - location type: object status: description: BigQueryDatasetStatus defines the config connector machine @@ -386,6 +383,10 @@ spec: etag: description: Output only. A hash of the resource. type: string + externalRef: + description: A unique specifier for the BigQueryDataset + resource in GCP. + type: string lastModifiedTime: description: Output only. The date when this dataset was last modified, in milliseconds since the epoch.
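Note that with `location` added to the CRD's required list above, every BigQueryDataset manifest must now set `spec.location` explicitly; the sample updates in the next hunks do exactly this. A minimal manifest under the updated schema might look as follows (the metadata name and location value are illustrative only):

apiVersion: bigquery.cnrm.cloud.google.com/v1beta1
kind: BigQueryDataset
metadata:
  name: bigquerydataset-sample  # hypothetical example name
spec:
  location: us-central1
  friendlyName: bigquerydataset-sample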
diff --git a/config/samples/resources/bigquerytable/bigquery_v1beta1_bigquerydataset.yaml b/config/samples/resources/bigquerytable/bigquery_v1beta1_bigquerydataset.yaml index 7782391cfe..a499069c59 100644 --- a/config/samples/resources/bigquerytable/bigquery_v1beta1_bigquerydataset.yaml +++ b/config/samples/resources/bigquerytable/bigquery_v1beta1_bigquerydataset.yaml @@ -17,4 +17,5 @@ kind: BigQueryDataset metadata: name: bigquerytabledep spec: - friendlyName: bigquerytable-dep \ No newline at end of file + friendlyName: bigquerytable-dep + location: us-central1 diff --git a/config/samples/resources/dataflowflextemplatejob/streaming-dataflow-flex-template-job/bigquery_v1beta1_bigquerydataset.yaml b/config/samples/resources/dataflowflextemplatejob/streaming-dataflow-flex-template-job/bigquery_v1beta1_bigquerydataset.yaml index 7b466b014d..d2d1f5d00a 100644 --- a/config/samples/resources/dataflowflextemplatejob/streaming-dataflow-flex-template-job/bigquery_v1beta1_bigquerydataset.yaml +++ b/config/samples/resources/dataflowflextemplatejob/streaming-dataflow-flex-template-job/bigquery_v1beta1_bigquerydataset.yaml @@ -16,3 +16,5 @@ apiVersion: bigquery.cnrm.cloud.google.com/v1beta1 kind: BigQueryDataset metadata: name: dataflowflextemplatejobdepstreaming +spec: + location: us-central1 diff --git a/config/samples/resources/pubsubsubscription/bigquery-pubsub-subscription/bigquery_v1beta1_bigquerydataset.yaml b/config/samples/resources/pubsubsubscription/bigquery-pubsub-subscription/bigquery_v1beta1_bigquerydataset.yaml index 6bcdebf190..e9e651d7e3 100644 --- a/config/samples/resources/pubsubsubscription/bigquery-pubsub-subscription/bigquery_v1beta1_bigquerydataset.yaml +++ b/config/samples/resources/pubsubsubscription/bigquery-pubsub-subscription/bigquery_v1beta1_bigquerydataset.yaml @@ -22,3 +22,4 @@ metadata: cnrm.cloud.google.com/project-id: ${PROJECT_ID?} spec: resourceID: pubsubsubscriptiondepbigquery + location: us-central1 diff --git a/dev/tools/controllerbuilder/pkg/codegen/mappergenerator.go b/dev/tools/controllerbuilder/pkg/codegen/mappergenerator.go index 13da3ee538..6359f7e78c 100644 --- a/dev/tools/controllerbuilder/pkg/codegen/mappergenerator.go +++ b/dev/tools/controllerbuilder/pkg/codegen/mappergenerator.go @@ -132,6 +132,8 @@ func (v *MapperGenerator) visitMessage(msg protoreflect.MessageDescriptor) { switch protoGoPackage { case "cloud.google.com/go/networkconnectivity/apiv1/networkconnectivitypb": protoGoPackage = "github.com/GoogleCloudPlatform/k8s-config-connector/mockgcp/generated/mockgcp/cloud/networkconnectivity/v1" + case "cloud.google.com/go/bigquery/apiv2/bigquerypb": + protoGoPackage = "github.com/GoogleCloudPlatform/k8s-config-connector/mockgcp/generated/mockgcp/cloud/bigquery/v2" } for _, goType := range goTypes { diff --git a/go.mod b/go.mod index 36c7d6e458..db15dbd7f7 100644 --- a/go.mod +++ b/go.mod @@ -49,6 +49,7 @@ require ( github.com/hashicorp/hcl/v2 v2.19.1 github.com/hashicorp/terraform-plugin-sdk/v2 v2.24.0 github.com/hashicorp/terraform-provider-google-beta v3.73.0+incompatible + github.com/huandu/go-clone v1.7.2 github.com/nasa9084/go-openapi v0.0.0-20200604141640-2875b7376353 github.com/olekukonko/tablewriter v0.0.5 github.com/onsi/gomega v1.27.10 @@ -102,7 +103,6 @@ require ( github.com/Microsoft/go-winio v0.6.1 // indirect github.com/ProtonMail/go-crypto v0.0.0-20230828082145-3c4c8a2d2371 // indirect github.com/agext/levenshtein v1.2.3 // indirect - github.com/apache/arrow/go/v15 v15.0.2 // indirect github.com/apparentlymart/go-cidr v1.1.0 // 
indirect github.com/apparentlymart/go-textseg/v13 v13.0.0 // indirect github.com/apparentlymart/go-textseg/v15 v15.0.0 // indirect @@ -136,13 +136,11 @@ require ( github.com/go-openapi/jsonreference v0.20.2 // indirect github.com/go-openapi/swag v0.22.3 // indirect github.com/gobuffalo/flect v0.2.3 // indirect - github.com/goccy/go-json v0.10.2 // indirect github.com/gogo/protobuf v1.3.2 // indirect github.com/golang/glog v1.2.2 // indirect github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect github.com/golang/protobuf v1.5.4 // indirect github.com/google/btree v1.1.3 // indirect - github.com/google/flatbuffers v23.5.26+incompatible // indirect github.com/google/gnostic v0.6.9 // indirect github.com/google/go-cpy v0.0.0-20211218193943-a9c933c06932 // indirect github.com/google/gofuzz v1.2.0 // indirect @@ -179,8 +177,6 @@ require ( github.com/josharian/intern v1.0.0 // indirect github.com/json-iterator/go v1.1.12 // indirect github.com/kevinburke/ssh_config v1.2.0 // indirect - github.com/klauspost/compress v1.16.7 // indirect - github.com/klauspost/cpuid/v2 v2.2.5 // indirect github.com/kylelemons/godebug v1.1.0 // indirect github.com/liggitt/tabwriter v0.0.0-20181228230101-89fcab3d43de // indirect github.com/mailru/easyjson v0.7.7 // indirect @@ -204,7 +200,6 @@ require ( github.com/oklog/run v1.0.0 // indirect github.com/onsi/ginkgo v1.16.5 // indirect github.com/peterbourgon/diskv v2.0.1+incompatible // indirect - github.com/pierrec/lz4/v4 v4.1.18 // indirect github.com/pjbgf/sha1cd v0.3.0 // indirect github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10 // indirect github.com/prometheus/client_model v0.6.0 // indirect @@ -219,7 +214,6 @@ require ( github.com/vmihailenco/tagparser v0.1.2 // indirect github.com/xanzy/ssh-agent v0.3.3 // indirect github.com/xlab/treeprint v1.1.0 // indirect - github.com/zeebo/xxh3 v1.0.2 // indirect go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.54.0 // indirect go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.54.0 // indirect go.opentelemetry.io/otel v1.29.0 // indirect @@ -230,14 +224,12 @@ require ( go.starlark.net v0.0.0-20200306205701-8dd3e2ee1dd5 // indirect go.uber.org/multierr v1.10.0 // indirect golang.org/x/crypto v0.28.0 // indirect - golang.org/x/exp v0.0.0-20231006140011-7918f672742d // indirect golang.org/x/mod v0.20.0 // indirect golang.org/x/net v0.30.0 // indirect golang.org/x/sys v0.26.0 // indirect golang.org/x/term v0.25.0 // indirect golang.org/x/text v0.19.0 // indirect golang.org/x/tools v0.24.0 // indirect - golang.org/x/xerrors v0.0.0-20240903120638-7835f813f4da // indirect gomodules.xyz/jsonpatch/v2 v2.3.0 // indirect google.golang.org/appengine v1.6.8 // indirect gopkg.in/inf.v0 v0.9.1 // indirect diff --git a/go.sum b/go.sum index c610b36b93..c715574541 100644 --- a/go.sum +++ b/go.sum @@ -48,8 +48,6 @@ cloud.google.com/go/compute v1.28.1 h1:XwPcZjgMCnU2tkwY10VleUjSAfpTj9RDn+kGrbYsi cloud.google.com/go/compute v1.28.1/go.mod h1:b72iXMY4FucVry3NR3Li4kVyyTvbMDE7x5WsqvxjsYk= cloud.google.com/go/compute/metadata v0.5.2 h1:UxK4uu/Tn+I3p2dYWTfiX4wva7aYlKixAHn3fyqngqo= cloud.google.com/go/compute/metadata v0.5.2/go.mod h1:C66sj2AluDcIqakBq/M8lw8/ybHgOZqin2obFxa/E5k= -cloud.google.com/go/datacatalog v1.22.1 h1:i0DyKb/o7j+0vgaFtimcRFjYsD6wFw1jpnODYUyiYRs= -cloud.google.com/go/datacatalog v1.22.1/go.mod h1:MscnJl9B2lpYlFoxRjicw19kFTwEke8ReKL5Y/6TWg8= cloud.google.com/go/dataflow v0.10.1 h1:RoVpCZ1BjJBH/5mzaXCgNg+l9FgTIYQ7C9xBRGvhkzo= 
cloud.google.com/go/dataflow v0.10.1/go.mod h1:zP4/tNjONFRcS4NcI9R94YDQEkPalimdbPkijVNJt/g= cloud.google.com/go/dataform v0.10.1 h1:FkOPrxf8sN9J2TMc4CIBhVivhMiO8D0eYN33s5A5Uo4= @@ -154,8 +152,6 @@ github.com/anmitsu/go-shlex v0.0.0-20161002113705-648efa622239/go.mod h1:2FmKhYU github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be h1:9AeTilPcZAjCFIImctFaOjnTIavg87rW78vTPkQqLI8= github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be/go.mod h1:ySMOLuWl6zY27l47sB3qLNK6tF2fkHG55UZxx8oIVo4= github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY= -github.com/apache/arrow/go/v15 v15.0.2 h1:60IliRbiyTWCWjERBCkO1W4Qun9svcYoZrSLcyOsMLE= -github.com/apache/arrow/go/v15 v15.0.2/go.mod h1:DGXsR3ajT524njufqf95822i+KTh+yea1jass9YXgjA= github.com/apparentlymart/go-cidr v1.1.0 h1:2mAhrMoF+nhXqxTzSZMUzDHkLjmIHC+Zzn4tdgBZjnU= github.com/apparentlymart/go-cidr v1.1.0/go.mod h1:EBcsNrHc3zQeuaeCeCtQruQm+n9/YjEn/vI25Lg7Gwc= github.com/apparentlymart/go-dump v0.0.0-20180507223929-23540a00eaa3/go.mod h1:oL81AME2rN47vu18xqj1S1jPIPuN7afo62yKTNn3XMM= @@ -409,8 +405,6 @@ github.com/go-test/deep v1.0.7 h1:/VSMRlnY/JSyqxQUzQLKVMAskpY/NZKFA5j2P+0pP2M= github.com/go-test/deep v1.0.7/go.mod h1:QV8Hv/iy04NyLBxAdO9njL0iVPN1S4d/A3NVv1V36o8= github.com/gobuffalo/flect v0.2.3 h1:f/ZukRnSNA/DUpSNDadko7Qc0PhGvsew35p/2tu+CRY= github.com/gobuffalo/flect v0.2.3/go.mod h1:vmkQwuZYhN5Pc4ljYQZzP+1sq+NEkK+lh20jmEmX3jc= -github.com/goccy/go-json v0.10.2 h1:CrxCmQqYDkv1z7lO7Wbh2HN93uovUHgrECaO5ZrCXAU= -github.com/goccy/go-json v0.10.2/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I= github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= github.com/gogo/protobuf v1.2.1/go.mod h1:hp+jE20tsWTFYpLwKvXlhS1hjn+gTNwPg2I6zVXpSg4= @@ -465,8 +459,6 @@ github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Z github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= github.com/google/btree v1.1.3 h1:CVpQJjYgC4VbzxeGVHfvZrv1ctoYCAI8vbl07Fcxlyg= github.com/google/btree v1.1.3/go.mod h1:qOPhT0dTNdNzV6Z/lhRX0YXUafgPLFUh+gZMl761Gm4= -github.com/google/flatbuffers v23.5.26+incompatible h1:M9dgRyhJemaM4Sw8+66GHBu8ioaQmyPLg1b8VwK5WJg= -github.com/google/flatbuffers v23.5.26+incompatible/go.mod h1:1AeVuKshWv4vARoZatz6mlQ0JxURH0Kv5+zNeJKJCa8= github.com/google/gnostic v0.6.9 h1:ZK/5VhkoX835RikCHpSUJV9a+S3e1zLh59YnyWeBW+0= github.com/google/gnostic v0.6.9/go.mod h1:Nm8234We1lq6iB9OmlgNv3nH91XLLVZHCDayfA3xq+E= github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= @@ -492,12 +484,9 @@ github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/ github.com/google/gofuzz v1.1.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= github.com/google/gofuzz v1.2.0 h1:xRy4A+RhZaiKjJ1bPfwQ8sedCA+YS2YcCHW6ec7JMi0= github.com/google/gofuzz v1.2.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= -github.com/google/martian v2.1.0+incompatible h1:/CP5g8u/VJHijgedC/Legn3BAbAaWPgecwXBIDzw5no= github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= github.com/google/martian/v3 v3.1.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= -github.com/google/martian/v3 v3.3.3 h1:DIhPTQrbPkgs2yJYdXU/eNACCG5DVQjySNRNlflZ9Fc= -github.com/google/martian/v3 
v3.3.3/go.mod h1:iEPrYcgCF7jA9OtScMFQyAlZZ4YXTKEtJ1E6RWzmBA0= github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= github.com/google/pprof v0.0.0-20191218002539-d4f498aebedc/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= @@ -625,6 +614,10 @@ github.com/hashicorp/terraform-svchost v0.0.0-20200729002733-f050f53b9734/go.mod github.com/hashicorp/yamux v0.0.0-20181012175058-2f1d1f20f75d h1:kJCB4vdITiW1eC1vq2e6IsrXKrZit1bv/TDYFGMp4BQ= github.com/hashicorp/yamux v0.0.0-20181012175058-2f1d1f20f75d/go.mod h1:+NfK9FKeTrX5uv1uIXGdwYDTeHna2qgaIlx54MXqjAM= github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= +github.com/huandu/go-assert v1.1.5 h1:fjemmA7sSfYHJD7CUqs9qTwwfdNAx7/j2/ZlHXzNB3c= +github.com/huandu/go-assert v1.1.5/go.mod h1:yOLvuqZwmcHIC5rIzrBhT7D3Q9c3GFnd0JrPVhn/06U= +github.com/huandu/go-clone v1.7.2 h1:3+Aq0Ed8XK+zKkLjE2dfHg0XrpIfcohBE1K+c8Usxoo= +github.com/huandu/go-clone v1.7.2/go.mod h1:ReGivhG6op3GYr+UY3lS6mxjKp7MIGTknuU5TbTVaXE= github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= github.com/imdario/mergo v0.3.5/go.mod h1:2EnlNZ0deacrJVfApfmtdGgDfMuh/nq6Ok1EcJh5FfA= @@ -665,10 +658,6 @@ github.com/kisielk/errcheck v1.1.0/go.mod h1:EZBBE59ingxPouuu3KfxchcWSUPOHkagtvW github.com/kisielk/errcheck v1.2.0/go.mod h1:/BMXB+zMLi60iA8Vv6Ksmxu/1UDYcXs4uQLJ+jE2L00= github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= -github.com/klauspost/compress v1.16.7 h1:2mk3MPGNzKyxErAw8YaohYh69+pa4sIQSC0fPGCFR9I= -github.com/klauspost/compress v1.16.7/go.mod h1:ntbaceVETuRiXiv4DpjP66DpAtAGkEQskQzEyD//IeE= -github.com/klauspost/cpuid/v2 v2.2.5 h1:0E5MSMDEoAulmXNFquVs//DdoomxaoTY1kUhbc/qbZg= -github.com/klauspost/cpuid/v2 v2.2.5/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws= github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg= github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc= @@ -803,8 +792,6 @@ github.com/pelletier/go-toml v1.2.0/go.mod h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/9 github.com/pelletier/go-toml v1.9.3/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c= github.com/peterbourgon/diskv v2.0.1+incompatible h1:UBdAOUP5p4RWqPBg048CAvpKN+vxiaj6gdUUzhl4XmI= github.com/peterbourgon/diskv v2.0.1+incompatible/go.mod h1:uqqh8zWWbv1HBMNONnaR/tNboyR3/BZd58JJSHlUSCU= -github.com/pierrec/lz4/v4 v4.1.18 h1:xaKrnTkyoqfh1YItXl56+6KJNVYWlEEPuAQW9xsplYQ= -github.com/pierrec/lz4/v4 v4.1.18/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4= github.com/pjbgf/sha1cd v0.3.0 h1:4D5XXmUUBUl/xQ6IjCkEAbqXskkq/4O7LmGn0AqMDs4= github.com/pjbgf/sha1cd v0.3.0/go.mod h1:nZ1rrWOcGJ5uZgEEVL1VUM9iRQiZvWdbZjkKyFzPPsI= github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= @@ -965,10 +952,6 @@ github.com/zclconf/go-cty v1.10.0/go.mod h1:vVKLxnk3puL4qRAv72AO+W99LUD4da90g3uU github.com/zclconf/go-cty v1.13.0 h1:It5dfKTTZHe9aeppbNOda3mN7Ag7sg6QkBNm6TkyFa0= 
github.com/zclconf/go-cty v1.13.0/go.mod h1:YKQzy/7pZ7iq2jNFzy5go57xdxdWoLLpaEp4u238AE0= github.com/zclconf/go-cty-debug v0.0.0-20191215020915-b22d67c1ba0b/go.mod h1:ZRKQfBXbGkpdV6QMzT3rU1kSTAnfu1dO8dPKjYprgj8= -github.com/zeebo/assert v1.3.0 h1:g7C04CbJuIDKNPFHmsk4hwZDO5O+kntRxzaUoNXj+IQ= -github.com/zeebo/assert v1.3.0/go.mod h1:Pq9JiuJQpG8JLJdtkwrJESF0Foym2/D9XMU5ciN/wJ0= -github.com/zeebo/xxh3 v1.0.2 h1:xZmwmqxHZA8AI603jOQ0tMqmBr9lPeFwGg6d+xy9DC0= -github.com/zeebo/xxh3 v1.0.2/go.mod h1:5NWz9Sef7zIDm2JHfFlcQvNekmcEl9ekUZQQKCYaDcA= go.etcd.io/bbolt v1.3.2/go.mod h1:IbVyRI1SCnLcuJnV2u8VeU0CEYM7e686BmAb1XKL+uU= go.etcd.io/bbolt v1.3.3/go.mod h1:IbVyRI1SCnLcuJnV2u8VeU0CEYM7e686BmAb1XKL+uU= go.etcd.io/bbolt v1.3.5/go.mod h1:G5EMThwa9y8QZGBClrRx5EY+Yw9kAhnjy3bSjsnlVTQ= @@ -1059,8 +1042,6 @@ golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u0 golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM= golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU= -golang.org/x/exp v0.0.0-20231006140011-7918f672742d h1:jtJma62tbqLibJ5sFQz8bKtEM8rJBtfilJ2qTU199MI= -golang.org/x/exp v0.0.0-20231006140011-7918f672742d/go.mod h1:ldy0pHrwJyGW56pPQzzkH36rKxoZW1tw7ZJpeKx+hdo= golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= @@ -1371,14 +1352,10 @@ golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8T golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20240903120638-7835f813f4da h1:noIWHXmPHxILtqtCOPIhSt0ABwskkZKjD3bXGnZGpNY= -golang.org/x/xerrors v0.0.0-20240903120638-7835f813f4da/go.mod h1:NDW/Ps6MPRej6fsCIbMTohpP40sJ/P/vI1MoTEGwX90= gomodules.xyz/jsonpatch/v2 v2.0.1/go.mod h1:IhYNNY4jnS53ZnfE4PAmpKtDpTCj1JFXc+3mwe7XcUU= gomodules.xyz/jsonpatch/v2 v2.3.0 h1:8NFhfS6gzxNqjLIYnZxg319wZ5Qjnx4m/CcX+Klzazc= gomodules.xyz/jsonpatch/v2 v2.3.0/go.mod h1:AH3dM2RI6uoBZxn3LVrfvJ3E0/9dG4cSrbuBJT4moAY= gonum.org/v1/gonum v0.0.0-20190331200053-3d26580ed485/go.mod h1:2ltnJ7xHfj0zHS40VVPYEAAMTa3ZGguvHGBSJeRWqE0= -gonum.org/v1/gonum v0.12.0 h1:xKuo6hzt+gMav00meVPUlXwSdoEJP46BR+wdxQEFK2o= -gonum.org/v1/gonum v0.12.0/go.mod h1:73TDxJfAAHeA8Mk9mf8NlIppyhQNo5GLTcYeqgo2lvY= gonum.org/v1/netlib v0.0.0-20190313105609-8cb42192e0e0/go.mod h1:wa6Ws7BG/ESfp6dHfk7C6KdzKA7wR7u/rKwOGE66zvw= gonum.org/v1/netlib v0.0.0-20190331212654-76723241ea4e/go.mod h1:kS+toOQn6AQKjmKJ7gzohV1XkqsFehRA2FbsbkopSuQ= google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE= diff --git a/mockgcp/mockbigquery/datasets.go b/mockgcp/mockbigquery/datasets.go index 243a442b68..6dc1d09efe 100644 --- a/mockgcp/mockbigquery/datasets.go +++ b/mockgcp/mockbigquery/datasets.go @@ -53,7 +53,6 @@ func (s *datasetsServer) GetDataset(ctx context.Context, req *pb.GetDatasetReque } return nil, err } - if 
obj.MaxTimeTravelHours == nil { obj.MaxTimeTravelHours = &defaultMaxTimeTravelHours } diff --git a/pkg/clients/generated/apis/bigquery/v1beta1/bigquerydataset_types.go b/pkg/clients/generated/apis/bigquery/v1beta1/bigquerydataset_types.go index 891088e838..24c87195db 100644 --- a/pkg/clients/generated/apis/bigquery/v1beta1/bigquerydataset_types.go +++ b/pkg/clients/generated/apis/bigquery/v1beta1/bigquerydataset_types.go @@ -91,10 +91,10 @@ type DatasetAccess struct { } type DatasetDataset struct { - /* Required. A unique ID for this dataset, without the project name. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 1,024 characters. */ + /* A unique Id for this dataset, without the project name. The Id must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 1,024 characters. */ DatasetId string `json:"datasetId"` - /* Required. The ID of the project containing this dataset. */ + /* The ID of the project containing this dataset. */ ProjectId string `json:"projectId"` } @@ -105,24 +105,24 @@ type DatasetDefaultEncryptionConfiguration struct { } type DatasetRoutine struct { - /* Required. The ID of the dataset containing this routine. */ + /* The ID of the dataset containing this routine. */ DatasetId string `json:"datasetId"` - /* Required. The ID of the project containing this routine. */ + /* The ID of the project containing this routine. */ ProjectId string `json:"projectId"` - /* Required. The ID of the routine. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 256 characters. */ + /* The Id of the routine. The Id must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 256 characters. */ RoutineId string `json:"routineId"` } type DatasetView struct { - /* Required. The ID of the dataset containing this table. */ + /* The ID of the dataset containing this table. */ DatasetId string `json:"datasetId"` - /* Required. The ID of the project containing this table. */ + /* The ID of the project containing this table. */ ProjectId string `json:"projectId"` - /* Required. The ID of the table. The ID can contain Unicode characters in category L (letter), M (mark), N (number), Pc (connector, including underscore), Pd (dash), and Zs (space). For more information, see [General Category](https://wikipedia.org/wiki/Unicode_character_property#General_Category). The maximum length is 1,024 characters. Certain operations allow suffixing of the table ID with a partition decorator, such as `sample_table$20190123`. */ + /* The Id of the table. The Id can contain Unicode characters in category L (letter), M (mark), N (number), Pc (connector, including underscore), Pd (dash), and Zs (space). For more information, see [General Category](https://wikipedia.org/wiki/Unicode_character_property#General_Category). The maximum length is 1,024 characters. Certain operations allow suffixing of the table Id with a partition decorator, such as `sample_table$20190123`. */ TableId string `json:"tableId"` } @@ -179,14 +179,13 @@ type BigQueryDatasetSpec struct { IsCaseInsensitive *bool `json:"isCaseInsensitive,omitempty"` /* The geographic location where the dataset should reside. See https://cloud.google.com/bigquery/docs/locations for supported locations. */ - // +optional - Location *string `json:"location,omitempty"` + Location string `json:"location"` /* Optional. Defines the time travel window in hours. 
The value can be from 48 to 168 hours (2 to 7 days). The default value is 168 hours if this is not set. */ // +optional MaxTimeTravelHours *string `json:"maxTimeTravelHours,omitempty"` - /* The project that this resource belongs to. optional. */ + /* Optional. The project that this resource belongs to. */ // +optional ProjectRef *v1alpha1.ResourceRef `json:"projectRef,omitempty"` @@ -211,6 +210,10 @@ type BigQueryDatasetStatus struct { // +optional Etag *string `json:"etag,omitempty"` + /* A unique specifier for the BigQueryDataset resource in GCP. */ + // +optional + ExternalRef *string `json:"externalRef,omitempty"` + /* Output only. The date when this dataset was last modified, in milliseconds since the epoch. */ // +optional LastModifiedTime *int64 `json:"lastModifiedTime,omitempty"` diff --git a/pkg/clients/generated/apis/bigquery/v1beta1/zz_generated.deepcopy.go b/pkg/clients/generated/apis/bigquery/v1beta1/zz_generated.deepcopy.go index ff2aa02203..c23d48c6fd 100644 --- a/pkg/clients/generated/apis/bigquery/v1beta1/zz_generated.deepcopy.go +++ b/pkg/clients/generated/apis/bigquery/v1beta1/zz_generated.deepcopy.go @@ -135,11 +135,6 @@ func (in *BigQueryDatasetSpec) DeepCopyInto(out *BigQueryDatasetSpec) { *out = new(bool) **out = **in } - if in.Location != nil { - in, out := &in.Location, &out.Location - *out = new(string) - **out = **in - } if in.MaxTimeTravelHours != nil { in, out := &in.MaxTimeTravelHours, &out.MaxTimeTravelHours *out = new(string) @@ -191,6 +186,11 @@ func (in *BigQueryDatasetStatus) DeepCopyInto(out *BigQueryDatasetStatus) { *out = new(string) **out = **in } + if in.ExternalRef != nil { + in, out := &in.ExternalRef, &out.ExternalRef + *out = new(string) + **out = **in + } if in.LastModifiedTime != nil { in, out := &in.LastModifiedTime, &out.LastModifiedTime *out = new(int64) diff --git a/pkg/controller/direct/bigquery/v2/mapper.generated.go b/pkg/controller/direct/bigquery/v2/mapper.generated.go new file mode 100644 index 0000000000..f58fc9b069 --- /dev/null +++ b/pkg/controller/direct/bigquery/v2/mapper.generated.go @@ -0,0 +1,248 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License.
+ +package bigquery + +import ( + krm "github.com/GoogleCloudPlatform/k8s-config-connector/apis/bigquery/v1beta1" + pb "github.com/GoogleCloudPlatform/k8s-config-connector/mockgcp/generated/mockgcp/cloud/bigquery/v2" + "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/controller/direct" +) + +func Access_FromProto(mapCtx *direct.MapContext, in *pb.DatasetAccess) *krm.Access { + if in == nil { + return nil + } + out := &krm.Access{} + out.Role = direct.LazyPtr(in.GetRole()) + out.UserByEmail = direct.LazyPtr(in.GetUserByEmail()) + out.GroupByEmail = direct.LazyPtr(in.GetGroupByEmail()) + out.Domain = direct.LazyPtr(in.GetDomain()) + out.SpecialGroup = direct.LazyPtr(in.GetSpecialGroup()) + out.IamMember = direct.LazyPtr(in.GetIamMember()) + out.View = ReferencedTable_FromProto(mapCtx, in.GetView()) + out.Routine = ReferencedRoutine_FromProto(mapCtx, in.GetRoutine()) + out.Dataset = DatasetAccessEntry_FromProto(mapCtx, in.GetDataset()) + return out +} +func Access_ToProto(mapCtx *direct.MapContext, in *krm.Access) *pb.DatasetAccess { + if in == nil { + return nil + } + out := &pb.DatasetAccess{} + out.Role = in.Role + out.UserByEmail = in.UserByEmail + out.GroupByEmail = in.GroupByEmail + out.Domain = in.Domain + out.SpecialGroup = in.SpecialGroup + out.IamMember = in.IamMember + out.View = ReferencedTable_ToProto(mapCtx, in.View) + out.Routine = ReferencedRoutine_ToProto(mapCtx, in.Routine) + out.Dataset = DatasetAccessEntry_ToProto(mapCtx, in.Dataset) + return out +} + +func DatasetAccessEntry_FromProto(mapCtx *direct.MapContext, in *pb.DatasetAccessEntry) *krm.DatasetAccessEntry { + if in == nil { + return nil + } + out := &krm.DatasetAccessEntry{} + out.Dataset = DatasetReference_FromProto(mapCtx, in.GetDataset()) + out.TargetTypes = in.TargetTypes + return out +} +func DatasetAccessEntry_ToProto(mapCtx *direct.MapContext, in *krm.DatasetAccessEntry) *pb.DatasetAccessEntry { + if in == nil { + return nil + } + out := &pb.DatasetAccessEntry{} + out.Dataset = DatasetReference_ToProto(mapCtx, in.Dataset) + out.TargetTypes = in.TargetTypes + return out +} +func DatasetReference_FromProto(mapCtx *direct.MapContext, in *pb.DatasetReference) *krm.DatasetReference { + if in == nil { + return nil + } + out := &krm.DatasetReference{} + out.DatasetId = in.DatasetId + out.ProjectId = in.ProjectId + return out +} +func DatasetReference_ToProto(mapCtx *direct.MapContext, in *krm.DatasetReference) *pb.DatasetReference { + if in == nil { + return nil + } + out := &pb.DatasetReference{} + out.DatasetId = in.DatasetId + out.ProjectId = in.ProjectId + return out +} +func EncryptionConfiguration_FromProto(mapCtx *direct.MapContext, in *pb.EncryptionConfiguration) *krm.EncryptionConfiguration { + if in == nil { + return nil + } + out := &krm.EncryptionConfiguration{} + out.KmsKeyRef.Name = *in.KmsKeyName + return out +} +func EncryptionConfiguration_ToProto(mapCtx *direct.MapContext, in *krm.EncryptionConfiguration) *pb.EncryptionConfiguration { + if in == nil { + return nil + } + out := &pb.EncryptionConfiguration{} + out.KmsKeyName = &in.KmsKeyRef.Name + return out +} + +func ExternalCatalogDatasetOptions_FromProto(mapCtx *direct.MapContext, in *pb.ExternalCatalogDatasetOptions) *krm.ExternalCatalogDatasetOptions { + if in == nil { + return nil + } + out := &krm.ExternalCatalogDatasetOptions{} + out.Parameters = in.Parameters + out.DefaultStorageLocationUri = direct.LazyPtr(in.GetDefaultStorageLocationUri()) + return out +} +func ExternalCatalogDatasetOptions_ToProto(mapCtx *direct.MapContext, in 
*krm.ExternalCatalogDatasetOptions) *pb.ExternalCatalogDatasetOptions { + if in == nil { + return nil + } + out := &pb.ExternalCatalogDatasetOptions{} + out.Parameters = in.Parameters + out.DefaultStorageLocationUri = in.DefaultStorageLocationUri + return out +} +func ExternalDatasetReference_FromProto(mapCtx *direct.MapContext, in *pb.ExternalDatasetReference) *krm.ExternalDatasetReference { + if in == nil { + return nil + } + out := &krm.ExternalDatasetReference{} + out.ExternalSource = direct.LazyPtr(in.GetExternalSource()) + out.Connection = direct.LazyPtr(in.GetConnection()) + return out +} +func ExternalDatasetReference_ToProto(mapCtx *direct.MapContext, in *krm.ExternalDatasetReference) *pb.ExternalDatasetReference { + if in == nil { + return nil + } + out := &pb.ExternalDatasetReference{} + out.ExternalSource = in.ExternalSource + out.Connection = in.Connection + return out +} +func GcpTag_FromProto(mapCtx *direct.MapContext, in *pb.DatasetTags) *krm.GcpTag { + if in == nil { + return nil + } + out := &krm.GcpTag{} + out.TagKey = direct.LazyPtr(in.GetTagKey()) + out.TagValue = direct.LazyPtr(in.GetTagValue()) + return out +} +func GcpTag_ToProto(mapCtx *direct.MapContext, in *krm.GcpTag) *pb.DatasetTags { + if in == nil { + return nil + } + out := &pb.DatasetTags{} + out.TagKey = in.TagKey + out.TagValue = in.TagValue + return out +} +func LinkedDatasetMetadata_FromProto(mapCtx *direct.MapContext, in *pb.LinkedDatasetMetadata) *krm.LinkedDatasetMetadata { + if in == nil { + return nil + } + out := &krm.LinkedDatasetMetadata{} + return out +} +func LinkedDatasetMetadata_ToProto(mapCtx *direct.MapContext, in *krm.LinkedDatasetMetadata) *pb.LinkedDatasetMetadata { + if in == nil { + return nil + } + out := &pb.LinkedDatasetMetadata{} + return out +} +func LinkedDatasetSource_FromProto(mapCtx *direct.MapContext, in *pb.LinkedDatasetSource) *krm.LinkedDatasetSource { + if in == nil { + return nil + } + out := &krm.LinkedDatasetSource{} + out.SourceDataset = DatasetReference_FromProto(mapCtx, in.GetSourceDataset()) + return out +} +func LinkedDatasetSource_ToProto(mapCtx *direct.MapContext, in *krm.LinkedDatasetSource) *pb.LinkedDatasetSource { + if in == nil { + return nil + } + out := &pb.LinkedDatasetSource{} + out.SourceDataset = DatasetReference_ToProto(mapCtx, in.SourceDataset) + return out +} +func ReferencedTable_FromProto(mapCtx *direct.MapContext, in *pb.TableReference) *krm.TableReference { + if in == nil { + return nil + } + out := &krm.TableReference{} + out.ProjectId = in.ProjectId + out.DatasetId = in.DatasetId + out.TableId = in.TableId + return out +} +func ReferencedTable_ToProto(mapCtx *direct.MapContext, in *krm.TableReference) *pb.TableReference { + if in == nil { + return nil + } + out := &pb.TableReference{} + out.ProjectId = in.ProjectId + out.DatasetId = in.DatasetId + out.TableId = in.TableId + return out +} +func ReferencedRoutine_FromProto(mapCtx *direct.MapContext, in *pb.RoutineReference) *krm.RoutineReference { + if in == nil { + return nil + } + out := &krm.RoutineReference{} + out.ProjectId = in.ProjectId + out.DatasetId = in.DatasetId + out.RoutineId = in.RoutineId + return out +} +func ReferencedRoutine_ToProto(mapCtx *direct.MapContext, in *krm.RoutineReference) *pb.RoutineReference { + if in == nil { + return nil + } + out := &pb.RoutineReference{} + out.ProjectId = in.ProjectId + out.DatasetId = in.DatasetId + out.RoutineId = in.RoutineId + return out +} +func RestrictionConfig_FromProto(mapCtx *direct.MapContext, in *pb.RestrictionConfig) 
*krm.RestrictionConfig { + if in == nil { + return nil + } + out := &krm.RestrictionConfig{} + out.Type = direct.LazyPtr(in.GetType()) + return out +} +func RestrictionConfig_ToProto(mapCtx *direct.MapContext, in *krm.RestrictionConfig) *pb.RestrictionConfig { + if in == nil { + return nil + } + out := &pb.RestrictionConfig{} + out.Type = in.Type + return out +} diff --git a/pkg/controller/direct/bigquerydataset/bigquerydataset_mappings.go b/pkg/controller/direct/bigquerydataset/bigquerydataset_mappings.go index b7bc5b841a..05f6649eda 100644 --- a/pkg/controller/direct/bigquerydataset/bigquerydataset_mappings.go +++ b/pkg/controller/direct/bigquerydataset/bigquerydataset_mappings.go @@ -21,314 +21,208 @@ package bigquerydataset import ( - "fmt" - "time" + "strconv" - pb "cloud.google.com/go/bigquery" krm "github.com/GoogleCloudPlatform/k8s-config-connector/apis/bigquery/v1beta1" "github.com/GoogleCloudPlatform/k8s-config-connector/apis/refs/v1beta1" "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/controller/direct" + api "google.golang.org/api/bigquery/v2" ) -func Access_FromProto(mapCtx *direct.MapContext, in *pb.AccessEntry) *krm.Access { +func BigQueryDatasetSpec_ToAPI(mapCtx *direct.MapContext, in *krm.BigQueryDatasetSpec, name string) *api.Dataset { if in == nil { return nil } - out := &krm.Access{} - out.Role = direct.LazyPtr(fmt.Sprintf("%s", in.Role)) - switch in.EntityType { - case 1: - out.Domain = direct.LazyPtr(in.Entity) - case 2: - out.GroupByEmail = direct.LazyPtr(in.Entity) - case 3: - out.UserByEmail = direct.LazyPtr(in.Entity) - case 4: - out.SpecialGroup = direct.LazyPtr(in.Entity) - case 6: - out.IamMember = direct.LazyPtr(in.Entity) - } - out.View = TableReference_FromProto(mapCtx, in.View) - out.Routine = RoutineReference_FromProto(mapCtx, in.Routine) - out.Dataset = DatasetAccessEntry_FromProto(mapCtx, in.Dataset) - return out -} -func Access_ToProto(mapCtx *direct.MapContext, in *krm.Access) *pb.AccessEntry { - if in == nil { - return nil - } - out := &pb.AccessEntry{} - out.Role = pb.AccessRole(direct.ValueOf(in.Role)) - if in.Domain != nil { - out.EntityType = 1 - out.Entity = direct.ValueOf(in.Domain) - } - if in.GroupByEmail != nil { - out.EntityType = 2 - out.Entity = direct.ValueOf(in.GroupByEmail) - } - if in.UserByEmail != nil { - out.EntityType = 3 - out.Entity = direct.ValueOf(in.UserByEmail) + out := &api.Dataset{} + acccessList := []*api.DatasetAccess{} + for _, access := range in.Access { + curAccess := Access_ToAPI(mapCtx, direct.LazyPtr(access)) + acccessList = append(acccessList, curAccess) } - if in.SpecialGroup != nil { - out.EntityType = 4 - out.Entity = direct.ValueOf(in.SpecialGroup) - } - if in.IamMember != nil { - out.EntityType = 6 - out.Entity = direct.ValueOf(in.IamMember) + out.Access = acccessList + out.DefaultCollation = direct.ValueOf(in.DefaultCollation) + out.DefaultPartitionExpirationMs = direct.ValueOf(in.DefaultPartitionExpirationMs) + out.DefaultTableExpirationMs = direct.ValueOf(in.DefaultTableExpirationMs) + out.DefaultEncryptionConfiguration = EncryptionConfiguration_ToAPI(mapCtx, in.DefaultEncryptionConfiguration) + out.Description = direct.ValueOf(in.Description) + out.FriendlyName = direct.ValueOf(in.FriendlyName) + out.DatasetReference = DatasetReference_ToAPI(mapCtx, in, name) + out.Location = direct.ValueOf(in.Location) + out.IsCaseInsensitive = direct.ValueOf(in.IsCaseInsensitive) + if in.MaxTimeTravelHours != nil { + out.MaxTimeTravelHours, _ = strconv.ParseInt(direct.ValueOf(in.MaxTimeTravelHours), 10, 64) } - 
out.View = TableReference_ToProto(mapCtx, in.View) - out.Routine = RoutineReference_ToProto(mapCtx, in.Routine) - out.Dataset = DatasetAccessEntry_ToProto(mapCtx, in.Dataset) + out.StorageBillingModel = direct.ValueOf(in.StorageBillingModel) return out } -func Dataset_FromProto(mapCtx *direct.MapContext, in *pb.DatasetMetadata) *krm.Dataset { +func BigQueryDatasetSpec_FromAPI(mapCtx *direct.MapContext, in *api.Dataset) *krm.BigQueryDatasetSpec { if in == nil { return nil } - out := &krm.Dataset{} - out.Kind = direct.LazyPtr("BigQueryDataset") - out.Etag = direct.LazyPtr(in.ETag) - out.ID = direct.LazyPtr(in.FullID) - out.FriendlyName = direct.LazyPtr(in.Name) + out := &krm.BigQueryDatasetSpec{} + accessList := []krm.Access{} + for _, access := range in.Access { + curAccess := Access_FromAPI(mapCtx, access) + accessList = append(accessList, direct.ValueOf(curAccess)) + } + out.Access = accessList + out.DefaultCollation = direct.LazyPtr(in.DefaultCollation) + out.DefaultPartitionExpirationMs = direct.LazyPtr(in.DefaultPartitionExpirationMs) + out.DefaultTableExpirationMs = direct.LazyPtr(in.DefaultTableExpirationMs) + out.DefaultEncryptionConfiguration = EncryptionConfiguration_FromAPI(mapCtx, in.DefaultEncryptionConfiguration) out.Description = direct.LazyPtr(in.Description) - defaultTableExpirationMs := int64(in.DefaultTableExpiration / time.Millisecond) - out.DefaultTableExpirationMs = &defaultTableExpirationMs - defaultPartitionExpirationMs := int64(in.DefaultPartitionExpiration / time.Millisecond) - out.DefaultPartitionExpirationMs = &defaultPartitionExpirationMs - out.Labels = in.Labels - out.Access = direct.Slice_FromProto(mapCtx, in.Access, Access_FromProto) - //TODO: convert from time.Time to int64 - // out.CreationTime = in.CreationTime - // out.LastModifiedTime = in.LastModifiedTime - time.Now().UnixNano() + out.FriendlyName = direct.LazyPtr(in.FriendlyName) out.Location = direct.LazyPtr(in.Location) - out.DefaultEncryptionConfiguration = EncryptionConfiguration_FromProto(mapCtx, in.DefaultEncryptionConfig) - out.ExternalDatasetReference = ExternalDatasetReference_FromProto(mapCtx, in.ExternalDatasetReference) - out.DefaultCollation = direct.LazyPtr(in.DefaultCollation) - maxTimeTravelHours := (int64)(in.MaxTimeTravel / time.Hour) - out.MaxTimeTravelHours = &maxTimeTravelHours - out.Tags = direct.Slice_FromProto(mapCtx, in.Tags, DatasetTag_FromProto) + out.IsCaseInsensitive = direct.LazyPtr(in.IsCaseInsensitive) + maxTime := strconv.FormatInt(in.MaxTimeTravelHours, 10) + out.MaxTimeTravelHours = direct.LazyPtr(maxTime) out.StorageBillingModel = direct.LazyPtr(in.StorageBillingModel) return out } -func Dataset_ToProto(mapCtx *direct.MapContext, in *krm.Dataset) *pb.DatasetMetadata { +func BigQueryDatasetStatus_FromAPI(mapCtx *direct.MapContext, in *api.Dataset) *krm.BigQueryDatasetStatus { if in == nil { return nil } - out := &pb.DatasetMetadata{} - out.ETag = direct.ValueOf(in.Etag) - out.FullID = direct.ValueOf(in.ID) - out.Name = direct.ValueOf(in.FriendlyName) - out.Description = direct.ValueOf(in.Description) - out.DefaultTableExpiration = time.Duration(*in.DefaultTableExpirationMs) * time.Millisecond - out.DefaultPartitionExpiration = time.Duration(*in.DefaultPartitionExpirationMs) * time.Millisecond - out.Labels = in.Labels - out.Access = direct.Slice_ToProto(mapCtx, in.Access, Access_ToProto) - out.CreationTime = time.UnixMilli(*in.CreationTime) - out.LastModifiedTime = time.UnixMilli(*in.LastModifiedTime) - out.Location = direct.ValueOf(in.Location) - 
out.DefaultEncryptionConfig = EncryptionConfiguration_ToProto(mapCtx, in.DefaultEncryptionConfiguration) - out.ExternalDatasetReference = ExternalDatasetReference_ToProto(mapCtx, in.ExternalDatasetReference) - out.DefaultCollation = *in.DefaultCollation - out.MaxTimeTravel = time.Duration(*in.MaxTimeTravelHours) * time.Hour - out.Tags = direct.Slice_ToProto(mapCtx, in.Tags, DatasetTag_ToProto) - out.StorageBillingModel = direct.ValueOf(in.StorageBillingModel) + out := &krm.BigQueryDatasetStatus{} + out.Etag = direct.LazyPtr(in.Etag) + out.CreationTime = direct.LazyPtr(in.CreationTime) + out.LastModifiedTime = direct.LazyPtr(in.LastModifiedTime) + out.SelfLink = direct.LazyPtr(in.SelfLink) return out } -func DatasetAccessEntry_FromProto(mapCtx *direct.MapContext, in *pb.DatasetAccessEntry) *krm.DatasetAccessEntry { +func BigQueryDatasetStatusObservedState_ToAPI(mapCtx *direct.MapContext, in *krm.BigQueryDatasetStatus) *api.Dataset { if in == nil { return nil } - out := &krm.DatasetAccessEntry{} - out.Dataset = DatasetReference_FromProto(mapCtx, in.Dataset) - out.TargetTypes = in.TargetTypes + out := &api.Dataset{} + out.Etag = direct.ValueOf(in.Etag) + out.CreationTime = direct.ValueOf(in.CreationTime) + out.LastModifiedTime = direct.ValueOf(in.LastModifiedTime) + out.SelfLink = direct.ValueOf(in.SelfLink) return out } -func DatasetAccessEntry_ToProto(mapCtx *direct.MapContext, in *krm.DatasetAccessEntry) *pb.DatasetAccessEntry { +func Access_ToAPI(mapCtx *direct.MapContext, in *krm.Access) *api.DatasetAccess { if in == nil { return nil } - out := &pb.DatasetAccessEntry{} - out.Dataset = DatasetReference_ToProto(mapCtx, in.Dataset) - out.TargetTypes = in.TargetTypes + out := &api.DatasetAccess{} + out.Domain = direct.ValueOf(in.Domain) + out.GroupByEmail = direct.ValueOf(in.GroupByEmail) + out.IamMember = direct.ValueOf(in.IamMember) + out.UserByEmail = direct.ValueOf(in.UserByEmail) + out.SpecialGroup = direct.ValueOf(in.SpecialGroup) + out.Role = direct.ValueOf(in.Role) + out.Dataset = DatasetAccessEntry_ToAPI(mapCtx, in.Dataset) + out.Routine = RoutineReference_ToAPI(mapCtx, in.Routine) + out.View = TableReference_ToAPI(mapCtx, in.View) return out } -func DatasetList_FromProto(mapCtx *direct.MapContext, in *pb.DatasetIterator) *krm.DatasetList { +func Access_FromAPI(mapCtx *direct.MapContext, in *api.DatasetAccess) *krm.Access { if in == nil { return nil } - out := &krm.DatasetList{} - in.ListHidden = true - out.Kind = direct.LazyPtr("BigQueryDataset") - var datasets []krm.ListFormatDataset - var next *pb.Dataset - next, _ = in.Next() - for next != nil { - datasets = append(datasets, *ListFormatDataset_FromProto(mapCtx, next)) - next, _ = in.Next() - } - out.Datasets = datasets - + out := &krm.Access{} + out.Domain = direct.LazyPtr(in.Domain) + out.GroupByEmail = direct.LazyPtr(in.GroupByEmail) + out.IamMember = direct.LazyPtr(in.IamMember) + out.UserByEmail = direct.LazyPtr(in.UserByEmail) + out.SpecialGroup = direct.LazyPtr(in.SpecialGroup) + out.Role = direct.LazyPtr(in.Role) + out.Dataset = DatasetAccessEntry_FromAPI(mapCtx, in.Dataset) + out.Routine = RoutineReference_FromAPI(mapCtx, in.Routine) + out.View = TableReference_FromAPI(mapCtx, in.View) return out } -func DatasetList_ToProto(mapCtx *direct.MapContext, in *krm.DatasetList) *pb.DatasetIterator { +func DatasetAccessEntry_FromAPI(mapCtx *direct.MapContext, in *api.DatasetAccessEntry) *krm.DatasetAccessEntry { if in == nil { return nil } - out := &pb.DatasetIterator{} - // Missing - return out -} -func 
DatasetReference_FromProto(mapCtx *direct.MapContext, in *pb.Dataset) *krm.DatasetReference { - if in == nil { - return nil + out := &krm.DatasetAccessEntry{} + out.Dataset = &krm.DatasetReference{ + DatasetId: direct.LazyPtr(in.Dataset.DatasetId), + ProjectId: direct.LazyPtr(in.Dataset.ProjectId), } - out := &krm.DatasetReference{} - out.DatasetId = &in.DatasetID - out.ProjectId = &in.ProjectID + out.TargetTypes = in.TargetTypes return out } -func DatasetReference_ToProto(mapCtx *direct.MapContext, in *krm.DatasetReference) *pb.Dataset { +func DatasetAccessEntry_ToAPI(mapCtx *direct.MapContext, in *krm.DatasetAccessEntry) *api.DatasetAccessEntry { if in == nil { return nil } - out := &pb.Dataset{} - out.DatasetID = *in.DatasetId - out.ProjectID = *in.ProjectId - return out -} -func DatasetTag_FromProto(mapCtx *direct.MapContext, in *pb.DatasetTag) *krm.GcpTag { - if in == nil { - return nil + out := &api.DatasetAccessEntry{} + out.Dataset = &api.DatasetReference{ + DatasetId: direct.ValueOf(in.Dataset.DatasetId), + ProjectId: direct.ValueOf(in.Dataset.ProjectId), } - out := &krm.GcpTag{} - out.TagKey = direct.LazyPtr(in.TagKey) - out.TagValue = direct.LazyPtr(in.TagValue) + out.TargetTypes = in.TargetTypes return out } -func DatasetTag_ToProto(mapCtx *direct.MapContext, in *krm.GcpTag) *pb.DatasetTag { +func DatasetReference_ToAPI(mapCtx *direct.MapContext, in *krm.BigQueryDatasetSpec, name string) *api.DatasetReference { if in == nil { return nil } - out := &pb.DatasetTag{} - out.TagKey = direct.ValueOf(in.TagKey) - out.TagValue = direct.ValueOf(in.TagValue) + out := &api.DatasetReference{} + out.DatasetId = name return out } -func EncryptionConfiguration_FromProto(mapCtx *direct.MapContext, in *pb.EncryptionConfig) *krm.EncryptionConfiguration { +func EncryptionConfiguration_ToAPI(mapCtx *direct.MapContext, in *krm.EncryptionConfiguration) *api.EncryptionConfiguration { if in == nil { return nil } - out := &krm.EncryptionConfiguration{} - out.KmsKeyRef = &v1beta1.KMSCryptoKeyRef{ - Name: in.KMSKeyName, + out := &api.EncryptionConfiguration{} + if in.KmsKeyRef != nil { + out.KmsKeyName = in.KmsKeyRef.External } return out } -func EncryptionConfiguration_ToProto(mapCtx *direct.MapContext, in *krm.EncryptionConfiguration) *pb.EncryptionConfig { - if in == nil { - return nil - } - out := &pb.EncryptionConfig{} - out.KMSKeyName = in.KmsKeyRef.Name - return out -} -func ErrorProto_FromProto(mapCtx *direct.MapContext, in *pb.Error) *krm.ErrorProto { - if in == nil { - return nil - } - out := &krm.ErrorProto{} - out.Reason = direct.LazyPtr(in.Reason) - out.Location = direct.LazyPtr(in.Location) - out.Message = direct.LazyPtr(in.Message) - return out -} -func ErrorProto_ToProto(mapCtx *direct.MapContext, in *krm.ErrorProto) *pb.Error { +func EncryptionConfiguration_FromAPI(mapCtx *direct.MapContext, in *api.EncryptionConfiguration) *krm.EncryptionConfiguration { if in == nil { return nil } - out := &pb.Error{} - out.Reason = direct.ValueOf(in.Reason) - out.Location = direct.ValueOf(in.Location) - out.Message = direct.ValueOf(in.Message) - return out -} -func ExternalDatasetReference_FromProto(mapCtx *direct.MapContext, in *pb.ExternalDatasetReference) *krm.ExternalDatasetReference { - if in == nil { - return nil - } - out := &krm.ExternalDatasetReference{} - out.ExternalSource = direct.LazyPtr(in.ExternalSource) - out.Connection = direct.LazyPtr(in.Connection) - return out -} -func ExternalDatasetReference_ToProto(mapCtx *direct.MapContext, in *krm.ExternalDatasetReference) 
*pb.ExternalDatasetReference { - if in == nil { - return nil - } - out := &pb.ExternalDatasetReference{} - out.ExternalSource = direct.ValueOf(in.ExternalSource) - out.Connection = direct.ValueOf(in.Connection) - return out -} -func ListFormatDataset_FromProto(mapCtx *direct.MapContext, in *pb.Dataset) *krm.ListFormatDataset { - if in == nil { - return nil - } - out := &krm.ListFormatDataset{} - out.Kind = direct.LazyPtr("BigQueryDataset") - out.DatasetReference = DatasetReference_FromProto(mapCtx, in) - return out -} -func ListFormatDataset_ToProto(mapCtx *direct.MapContext, in *krm.ListFormatDataset) *pb.Dataset { - if in == nil { - return nil + out := &krm.EncryptionConfiguration{} + out.KmsKeyRef = &v1beta1.KMSCryptoKeyRef{ + External: in.KmsKeyName, } - out := &pb.Dataset{} - out = DatasetReference_ToProto(mapCtx, in.DatasetReference) return out } -func RoutineReference_FromProto(mapCtx *direct.MapContext, in *pb.Routine) *krm.RoutineReference { +func RoutineReference_FromAPI(mapCtx *direct.MapContext, in *api.RoutineReference) *krm.RoutineReference { if in == nil { return nil } out := &krm.RoutineReference{} - out.DatasetId = &in.DatasetID - out.ProjectId = &in.ProjectID - out.RoutineId = &in.RoutineID + out.DatasetId = direct.LazyPtr(in.DatasetId) + out.ProjectId = direct.LazyPtr(in.ProjectId) + out.RoutineId = direct.LazyPtr(in.RoutineId) return out } -func RoutineReference_ToProto(mapCtx *direct.MapContext, in *krm.RoutineReference) *pb.Routine { +func RoutineReference_ToAPI(mapCtx *direct.MapContext, in *krm.RoutineReference) *api.RoutineReference { if in == nil { return nil } - out := &pb.Routine{} - out.DatasetID = *in.DatasetId - out.ProjectID = *in.ProjectId - out.RoutineID = *in.RoutineId + out := &api.RoutineReference{} + out.DatasetId = direct.ValueOf(in.DatasetId) + out.ProjectId = direct.ValueOf(in.ProjectId) + out.RoutineId = direct.ValueOf(in.RoutineId) return out } -func TableReference_FromProto(mapCtx *direct.MapContext, in *pb.Table) *krm.TableReference { +func TableReference_FromAPI(mapCtx *direct.MapContext, in *api.TableReference) *krm.TableReference { if in == nil { return nil } out := &krm.TableReference{} - out.DatasetId = &in.DatasetID - out.ProjectId = &in.ProjectID - out.TableId = &in.TableID + out.DatasetId = direct.LazyPtr(in.DatasetId) + out.ProjectId = direct.LazyPtr(in.ProjectId) + out.TableId = direct.LazyPtr(in.TableId) return out } -func TableReference_ToProto(mapCtx *direct.MapContext, in *krm.TableReference) *pb.Table { +func TableReference_ToAPI(mapCtx *direct.MapContext, in *krm.TableReference) *api.TableReference { if in == nil { return nil } - out := &pb.Table{} - out.DatasetID = *in.DatasetId - out.ProjectID = *in.ProjectId - out.TableID = *in.TableId + out := &api.TableReference{} + out.DatasetId = direct.ValueOf(in.DatasetId) + out.ProjectId = direct.ValueOf(in.ProjectId) + out.TableId = direct.ValueOf(in.TableId) return out } diff --git a/pkg/controller/direct/bigquerydataset/dataset_controller.go b/pkg/controller/direct/bigquerydataset/dataset_controller.go new file mode 100644 index 0000000000..9b3c36786f --- /dev/null +++ b/pkg/controller/direct/bigquerydataset/dataset_controller.go @@ -0,0 +1,315 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package bigquerydataset + +import ( + "context" + "fmt" + "reflect" + + krm "github.com/GoogleCloudPlatform/k8s-config-connector/apis/bigquery/v1beta1" + refs "github.com/GoogleCloudPlatform/k8s-config-connector/apis/refs/v1beta1" + "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/config" + "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/controller/direct" + "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/controller/direct/directbase" + "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/controller/direct/registry" + + clone "github.com/huandu/go-clone" + api "google.golang.org/api/bigquery/v2" + "google.golang.org/api/option" + "google.golang.org/protobuf/types/known/fieldmaskpb" + + "k8s.io/apimachinery/pkg/apis/meta/v1/unstructured" + "k8s.io/apimachinery/pkg/runtime" + "k8s.io/klog/v2" + "sigs.k8s.io/controller-runtime/pkg/client" +) + +const ( + ctrlName = "bigquery-controller" + serviceDomain = "//bigquery.googleapis.com" +) + +func init() { + registry.RegisterModel(krm.BigQueryDatasetGVK, NewModel) +} + +func NewModel(ctx context.Context, config *config.ControllerConfig) (directbase.Model, error) { + return &model{config: *config}, nil +} + +var _ directbase.Model = &model{} + +type model struct { + config config.ControllerConfig +} + +func (m *model) service(ctx context.Context) (*api.Service, error) { + var opts []option.ClientOption + opts, err := m.config.RESTClientOptions() + if err != nil { + return nil, err + } + gcpService, err := api.NewService(ctx, opts...) 
+	if err != nil {
+		return nil, fmt.Errorf("building Dataset client: %w", err)
+	}
+	return gcpService, nil
+}
+
+func (m *model) AdapterForObject(ctx context.Context, reader client.Reader, u *unstructured.Unstructured) (directbase.Adapter, error) {
+	obj := &krm.BigQueryDataset{}
+	if err := runtime.DefaultUnstructuredConverter.FromUnstructured(u.Object, &obj); err != nil {
+		return nil, fmt.Errorf("error converting to %T: %w", obj, err)
+	}
+
+	id, err := krm.NewBigQueryDatasetRef(ctx, reader, obj)
+	if err != nil {
+		return nil, err
+	}
+
+	// Get bigquery GCP client
+	gcpService, err := m.service(ctx)
+	if err != nil {
+		return nil, err
+	}
+	return &Adapter{
+		id:         id,
+		gcpService: gcpService,
+		desired:    obj,
+		reader:     reader,
+	}, nil
+}
+
+func (m *model) AdapterForURL(ctx context.Context, url string) (directbase.Adapter, error) {
+	// TODO: Support URLs
+	return nil, nil
+}
+
+type Adapter struct {
+	id         *krm.BigQueryDatasetRef
+	gcpService *api.Service
+	desired    *krm.BigQueryDataset
+	actual     *api.Dataset
+	reader     client.Reader
+}
+
+var _ directbase.Adapter = &Adapter{}
+
+func (a *Adapter) Find(ctx context.Context) (bool, error) {
+	log := klog.FromContext(ctx).WithName(ctrlName)
+	log.V(2).Info("getting BigQueryDataset", "name", a.id.External)
+
+	parent, datasetId, err := krm.ParseBigQueryDatasetExternal(a.id.External)
+	if err != nil {
+		return false, fmt.Errorf("failed to parse bigquery dataset full name, %w", err)
+	}
+	datasetGetCall := a.gcpService.Datasets.Get(parent.ProjectID, datasetId)
+	datasetpb, err := datasetGetCall.Do()
+	if err != nil {
+		if direct.IsNotFound(err) {
+			return false, nil
+		}
+		return false, fmt.Errorf("getting BigQueryDataset %q: %w", a.id.External, err)
+	}
+	a.actual = datasetpb
+	return true, nil
+}
+
+func (a *Adapter) Create(ctx context.Context, createOp *directbase.CreateOperation) error {
+
+	log := klog.FromContext(ctx).WithName(ctrlName)
+	log.V(2).Info("creating Dataset", "name", a.id.External)
+	mapCtx := &direct.MapContext{}
+
+	desiredDataset := BigQueryDatasetSpec_ToAPI(mapCtx, &a.desired.Spec, a.desired.Name)
+	desiredDataset.Labels = make(map[string]string)
+	for k, v := range a.desired.GetObjectMeta().GetLabels() {
+		desiredDataset.Labels[k] = v
+	}
+	desiredDataset.Labels["managed-by-cnrm"] = "true"
+	parent, _, err := krm.ParseBigQueryDatasetExternal(a.id.External)
+	if err != nil {
+		return fmt.Errorf("failed to parse bigquery dataset full name, %w", err)
+	}
+	// Resolve KMS key reference
+	if a.desired.Spec.DefaultEncryptionConfiguration != nil {
+		kmsRef, err := refs.ResolveKMSCryptoKeyRef(ctx, a.reader, a.desired, a.desired.Spec.DefaultEncryptionConfiguration.KmsKeyRef)
+		if err != nil {
+			return err
+		}
+		desiredDataset.DefaultEncryptionConfiguration.KmsKeyName = kmsRef.External
+	}
+	insertDatasetCall := a.gcpService.Datasets.Insert(parent.ProjectID, desiredDataset)
+	inserted, err := insertDatasetCall.Do()
+	if err != nil {
+		return fmt.Errorf("inserting Dataset %s: %w", a.id.External, err)
+	}
+	log.V(2).Info("successfully inserted Dataset", "name", a.id.External)
+
+	status := &krm.BigQueryDatasetStatus{}
+	status = BigQueryDatasetStatus_FromAPI(mapCtx, inserted)
+	if mapCtx.Err() != nil {
+		return mapCtx.Err()
+	}
+	status.ExternalRef = &a.id.External
+	return createOp.UpdateStatus(ctx, status, nil)
+}
+
+func (a *Adapter) Update(ctx context.Context, updateOp *directbase.UpdateOperation) error {
+	u := updateOp.GetUnstructured()
+
+	log := klog.FromContext(ctx).WithName(ctrlName)
+	log.V(2).Info("updating Dataset", "name", a.id.External)
+	mapCtx := &direct.MapContext{}
+
+	// Convert KRM object to proto message
+	desiredKRM := a.desired.DeepCopy()
+	desired := BigQueryDatasetSpec_ToAPI(mapCtx, &desiredKRM.Spec, desiredKRM.Name)
+	if mapCtx.Err() != nil {
+		return mapCtx.Err()
+	}
+
+	resource := clone.Clone(a.actual).(*api.Dataset)
+
+	// Check for immutable fields
+	if !reflect.DeepEqual(desired.Location, resource.Location) {
+		return fmt.Errorf("BigQueryDataset %s/%s location cannot be changed, actual: %s, desired: %s", u.GetNamespace(), u.GetName(), resource.Location, desired.Location)
+	}
+
+	// Find diff
+	updateMask := &fieldmaskpb.FieldMask{}
+	if !reflect.DeepEqual(desired.Description, resource.Description) {
+		resource.Description = desired.Description
+		updateMask.Paths = append(updateMask.Paths, "description")
+	}
+	if !reflect.DeepEqual(desired.FriendlyName, resource.FriendlyName) {
+		resource.FriendlyName = desired.FriendlyName
+		updateMask.Paths = append(updateMask.Paths, "friendly_name")
+	}
+	if !reflect.DeepEqual(desired.DefaultPartitionExpirationMs, resource.DefaultPartitionExpirationMs) {
+		resource.DefaultPartitionExpirationMs = desired.DefaultPartitionExpirationMs
+		updateMask.Paths = append(updateMask.Paths, "default_partition_expiration_ms")
+	}
+	if !reflect.DeepEqual(desired.DefaultTableExpirationMs, resource.DefaultTableExpirationMs) {
+		resource.DefaultTableExpirationMs = desired.DefaultTableExpirationMs
+		updateMask.Paths = append(updateMask.Paths, "default_table_expiration_ms")
+	}
+	if !reflect.DeepEqual(desired.DefaultCollation, resource.DefaultCollation) {
+		resource.DefaultCollation = desired.DefaultCollation
+		updateMask.Paths = append(updateMask.Paths, "default_collation")
+	}
+	if desired.DefaultEncryptionConfiguration != nil && resource.DefaultEncryptionConfiguration != nil && !reflect.DeepEqual(desired.DefaultEncryptionConfiguration, resource.DefaultEncryptionConfiguration) {
+		// Resolve KMS key reference
+		if a.desired.Spec.DefaultEncryptionConfiguration != nil {
+			kmsRef, err := refs.ResolveKMSCryptoKeyRef(ctx, a.reader, a.desired, a.desired.Spec.DefaultEncryptionConfiguration.KmsKeyRef)
+			if err != nil {
+				return err
+			}
+			desired.DefaultEncryptionConfiguration.KmsKeyName = kmsRef.External
+		}
+		resource.DefaultEncryptionConfiguration.KmsKeyName = desired.DefaultEncryptionConfiguration.KmsKeyName
+		updateMask.Paths = append(updateMask.Paths, "default_encryption_configuration")
+	}
+	if !reflect.DeepEqual(desired.IsCaseInsensitive, resource.IsCaseInsensitive) {
+		resource.IsCaseInsensitive = desired.IsCaseInsensitive
+		updateMask.Paths = append(updateMask.Paths, "is_case_insensitive")
+	}
+	if !reflect.DeepEqual(desired.MaxTimeTravelHours, resource.MaxTimeTravelHours) {
+		resource.MaxTimeTravelHours = desired.MaxTimeTravelHours
+		updateMask.Paths = append(updateMask.Paths, "max_time_travel_hours")
+	}
+	if desired.Access != nil && resource.Access != nil && len(desired.Access) > 0 && !reflect.DeepEqual(desired.Access, resource.Access) {
+		for _, access := range desired.Access {
+			resource.Access = append(resource.Access, access)
+		}
+		updateMask.Paths = append(updateMask.Paths, "access")
+	}
+	if !reflect.DeepEqual(desired.StorageBillingModel, resource.StorageBillingModel) {
+		resource.StorageBillingModel = desired.StorageBillingModel
+		updateMask.Paths = append(updateMask.Paths, "storage_billing_model")
+	}
+
+	if len(updateMask.Paths) == 0 {
+		return nil
+	}
+	parent, datasetId, err := krm.ParseBigQueryDatasetExternal(a.id.External)
+	if err != nil {
+		return
fmt.Errorf("failed to parse bigquery dataset full name, %w", err) + } + + if desired.Access == nil || len(desired.Access) == 0 { + resource.Access = a.actual.Access + } + updateDatasetCall := a.gcpService.Datasets.Update(parent.ProjectID, datasetId, resource) + updated, err := updateDatasetCall.Do() + if err != nil { + return fmt.Errorf("updating Dataset %s: %w", a.id.External, err) + } + log.V(2).Info("successfully updated Dataset", "name", a.id.External) + + status := &krm.BigQueryDatasetStatus{} + status = BigQueryDatasetStatus_FromAPI(mapCtx, updated) + if mapCtx.Err() != nil { + return mapCtx.Err() + } + return updateOp.UpdateStatus(ctx, status, nil) +} + +func (a *Adapter) Export(ctx context.Context) (*unstructured.Unstructured, error) { + if a.actual == nil { + return nil, fmt.Errorf("Find() not called") + } + u := &unstructured.Unstructured{} + + obj := &krm.BigQueryDataset{} + mapCtx := &direct.MapContext{} + obj.Spec = direct.ValueOf(BigQueryDatasetSpec_FromAPI(mapCtx, a.actual)) + if mapCtx.Err() != nil { + return nil, mapCtx.Err() + } + parent, _, err := krm.ParseBigQueryDatasetExternal(a.id.External) + if err != nil { + return nil, fmt.Errorf("failed to parse bigquery dataset full name, %w", err) + } + + obj.Spec.ProjectRef = &refs.ProjectRef{Name: parent.ProjectID} + obj.Spec.Location = &parent.Location + uObj, err := runtime.DefaultUnstructuredConverter.ToUnstructured(obj) + if err != nil { + return nil, err + } + u.Object = uObj + return u, nil +} + +// Delete implements the Adapter interface. +func (a *Adapter) Delete(ctx context.Context, deleteOp *directbase.DeleteOperation) (bool, error) { + log := klog.FromContext(ctx).WithName(ctrlName) + log.V(2).Info("deleting Dataset", "name", a.id.External) + + parent, datasetId, err := krm.ParseBigQueryDatasetExternal(a.id.External) + if err != nil { + return false, fmt.Errorf("failed to parse bigquery dataset full name, %w", err) + } + deleteDatasetCall := a.gcpService.Datasets.Delete(parent.ProjectID, datasetId) + err = deleteDatasetCall.Do() + if err != nil { + return false, fmt.Errorf("deleting Dataset %s: %w", a.id.External, err) + } + log.V(2).Info("successfully deleted Dataset", "name", a.id.External) + + return true, nil +} diff --git a/pkg/controller/direct/bigquerydataset/dataset_externalresource.go b/pkg/controller/direct/bigquerydataset/dataset_externalresource.go new file mode 100644 index 0000000000..74ccbc2c9e --- /dev/null +++ b/pkg/controller/direct/bigquerydataset/dataset_externalresource.go @@ -0,0 +1,25 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+
+package bigquerydataset
+
+import (
+	krm "github.com/GoogleCloudPlatform/k8s-config-connector/apis/bigquery/v1beta1"
+)
+
+// AsExternalRef builds an externalRef from a BigQueryDatasetRef
+func AsExternalRef(datasetRef *krm.BigQueryDatasetRef) *string {
+	e := serviceDomain + "/" + datasetRef.External
+	return &e
+}
diff --git a/pkg/controller/direct/bigquerydataset/utils.go b/pkg/controller/direct/bigquerydataset/utils.go
new file mode 100644
index 0000000000..71e4335e1e
--- /dev/null
+++ b/pkg/controller/direct/bigquerydataset/utils.go
@@ -0,0 +1,57 @@
+// Copyright 2024 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package bigquerydataset
+
+import (
+	"encoding/json"
+	"fmt"
+
+	"google.golang.org/protobuf/encoding/protojson"
+	"google.golang.org/protobuf/reflect/protoreflect"
+)
+
+func convertProtoToAPI(u protoreflect.ProtoMessage, v any) error {
+	if u == nil {
+		return nil
+	}
+
+	j, err := protojson.Marshal(u)
+	if err != nil {
+		return fmt.Errorf("converting proto to json: %w", err)
+	}
+
+	if err := json.Unmarshal(j, v); err != nil {
+		return fmt.Errorf("converting json to cloud API type: %w", err)
+	}
+	return nil
+}
+
+func convertAPIToProto[V protoreflect.ProtoMessage](u any, pV *V) error {
+	if u == nil {
+		return nil
+	}
+
+	j, err := json.Marshal(u)
+	if err != nil {
+		return fmt.Errorf("converting cloud API type to json: %w", err)
+	}
+
+	var v V
+	if err := json.Unmarshal(j, &v); err != nil {
+		return fmt.Errorf("converting json to proto type: %w", err)
+	}
+	*pV = v
+	return nil
+}
diff --git a/pkg/controller/direct/register/register.go b/pkg/controller/direct/register/register.go
index 81fc73bf53..5a478ad8e4 100644
--- a/pkg/controller/direct/register/register.go
+++ b/pkg/controller/direct/register/register.go
@@ -19,6 +19,7 @@ import (
 	_ "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/controller/direct/apikeys"
 	_ "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/controller/direct/bigqueryanalyticshub"
 	_ "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/controller/direct/bigqueryconnection"
+	_ "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/controller/direct/bigquerydataset"
 	_ "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/controller/direct/bigquerydatatransfer"
 	_ "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/controller/direct/certificatemanager"
 	_ "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/controller/direct/cloudbuild"
diff --git a/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/basicbigquerydataset-direct/_generated_export_basicbigquerydataset-direct.golden b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/basicbigquerydataset-direct/_generated_export_basicbigquerydataset-direct.golden
new file mode 100644
index 0000000000..6c64b9259f
--- /dev/null
+++ b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/basicbigquerydataset-direct/_generated_export_basicbigquerydataset-direct.golden
@@ -0,0 +1,25 @@
+apiVersion: bigquery.cnrm.cloud.google.com/v1beta1 +kind: BigQueryDataset +metadata: + annotations: + cnrm.cloud.google.com/delete-contents-on-destroy: "false" + labels: + cnrm-test: "true" + managed-by-cnrm: "true" + name: bigquerydatasetsample${uniqueId} +spec: + access: + - role: OWNER + specialGroup: projectOwners + - role: OWNER + userByEmail: user@google.com + - role: READER + specialGroup: projectReaders + - role: WRITER + specialGroup: projectWriters + friendlyName: bigquerydataset-sample-updated + location: us-central1 + maxTimeTravelHours: "168" + projectRef: + external: ${projectId} + resourceID: bigquerydatasetsample${uniqueId} \ No newline at end of file diff --git a/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/basicbigquerydataset-direct/_generated_object_basicbigquerydataset-direct.golden.yaml b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/basicbigquerydataset-direct/_generated_object_basicbigquerydataset-direct.golden.yaml new file mode 100644 index 0000000000..34f4f241a1 --- /dev/null +++ b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/basicbigquerydataset-direct/_generated_object_basicbigquerydataset-direct.golden.yaml @@ -0,0 +1,32 @@ +apiVersion: bigquery.cnrm.cloud.google.com/v1beta1 +kind: BigQueryDataset +metadata: + annotations: + alpha.cnrm.cloud.google.com/reconciler: direct + cnrm.cloud.google.com/management-conflict-prevention-policy: none + finalizers: + - cnrm.cloud.google.com/finalizer + - cnrm.cloud.google.com/deletion-defender + generation: 2 + labels: + cnrm-test: "true" + name: bigquerydatasetsample${uniqueId} + namespace: ${uniqueId} +spec: + friendlyName: bigquerydataset-sample-updated + location: us-central1 + projectRef: + external: ${projectId} +status: + conditions: + - lastTransitionTime: "1970-01-01T00:00:00Z" + message: The resource is up to date + reason: UpToDate + status: "True" + type: Ready + creationTime: "1970-01-01T00:00:00Z" + etag: abcdef123456 + externalRef: projects/${projectId}/locations/us-central1/datasets/bigquerydatasetsample${uniqueId} + lastModifiedTime: "1970-01-01T00:00:00Z" + observedGeneration: 2 + selfLink: https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydatasetsample${uniqueId} diff --git a/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/basicbigquerydataset-direct/_http.log b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/basicbigquerydataset-direct/_http.log new file mode 100644 index 0000000000..6b3fc1f9c8 --- /dev/null +++ b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/basicbigquerydataset-direct/_http.log @@ -0,0 +1,369 @@ +GET https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/${datasetID}?alt=json&prettyPrint=false +User-Agent: kcc/controller-manager + +404 Not Found +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "error": { + "code": 404, + "errors": [ + { + "domain": "global", + "message": "Not found: Dataset ${projectId}:bigquerydatasetsample${uniqueId}", + "reason": "notFound" + } + ], + "message": "Not found: Dataset ${projectId}:bigquerydatasetsample${uniqueId}", + "status": "NOT_FOUND" + } +} + +--- + +POST https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets?alt=json&prettyPrint=false +Content-Type: 
application/json +User-Agent: kcc/controller-manager + +{ + "datasetReference": { + "datasetId": "bigquerydatasetsample${uniqueId}" + }, + "friendlyName": "bigquerydataset-sample", + "labels": { + "cnrm-test": "true", + "managed-by-cnrm": "true" + }, + "location": "us-central1" +} + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "access": [ + { + "role": "WRITER", + "specialGroup": "projectWriters" + }, + { + "role": "OWNER", + "specialGroup": "projectOwners" + }, + { + "role": "OWNER", + "userByEmail": "user@google.com" + }, + { + "role": "READER", + "specialGroup": "projectReaders" + } + ], + "creationTime": "123456789", + "datasetReference": { + "datasetId": "bigquerydatasetsample${uniqueId}", + "projectId": "${projectId}" + }, + "etag": "abcdef0123A=", + "friendlyName": "bigquerydataset-sample", + "id": "000000000000000000000", + "kind": "bigquery#dataset", + "labels": { + "cnrm-test": "true", + "managed-by-cnrm": "true" + }, + "lastModifiedTime": "123456789", + "location": "us-central1", + "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydatasetsample${uniqueId}", + "type": "DEFAULT" +} + +--- + +GET https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/${datasetID}?alt=json&prettyPrint=false +User-Agent: kcc/controller-manager + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "access": [ + { + "role": "WRITER", + "specialGroup": "projectWriters" + }, + { + "role": "OWNER", + "specialGroup": "projectOwners" + }, + { + "role": "OWNER", + "userByEmail": "user@google.com" + }, + { + "role": "READER", + "specialGroup": "projectReaders" + } + ], + "creationTime": "123456789", + "datasetReference": { + "datasetId": "bigquerydatasetsample${uniqueId}", + "projectId": "${projectId}" + }, + "etag": "abcdef0123A=", + "friendlyName": "bigquerydataset-sample", + "id": "000000000000000000000", + "kind": "bigquery#dataset", + "labels": { + "cnrm-test": "true", + "managed-by-cnrm": "true" + }, + "lastModifiedTime": "123456789", + "location": "us-central1", + "maxTimeTravelHours": "168", + "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydatasetsample${uniqueId}", + "type": "DEFAULT" +} + +--- + +PUT https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/${datasetID}?alt=json&prettyPrint=false +Content-Type: application/json +User-Agent: kcc/controller-manager + +{ + "access": [ + { + "role": "WRITER", + "specialGroup": "projectWriters" + }, + { + "role": "OWNER", + "specialGroup": "projectOwners" + }, + { + "role": "OWNER", + "userByEmail": "user@google.com" + }, + { + "role": "READER", + "specialGroup": "projectReaders" + } + ], + "creationTime": "123456789", + "datasetReference": { + "datasetId": "bigquerydatasetsample${uniqueId}", + "projectId": "${projectId}" + }, + "etag": "abcdef0123A=", + "friendlyName": "bigquerydataset-sample-updated", + "id": "000000000000000000000", + "kind": "bigquery#dataset", + "labels": { + "cnrm-test": "true", + "managed-by-cnrm": "true" + }, + "lastModifiedTime": "123456789", + "location": "us-central1", + "selfLink": 
"https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydatasetsample${uniqueId}", + "type": "DEFAULT" +} + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "access": [ + { + "role": "WRITER", + "specialGroup": "projectWriters" + }, + { + "role": "OWNER", + "specialGroup": "projectOwners" + }, + { + "role": "OWNER", + "userByEmail": "user@google.com" + }, + { + "role": "READER", + "specialGroup": "projectReaders" + } + ], + "creationTime": "123456789", + "datasetReference": { + "datasetId": "bigquerydatasetsample${uniqueId}", + "projectId": "${projectId}" + }, + "etag": "abcdef0123A=", + "friendlyName": "bigquerydataset-sample-updated", + "id": "000000000000000000000", + "kind": "bigquery#dataset", + "labels": { + "cnrm-test": "true", + "managed-by-cnrm": "true" + }, + "lastModifiedTime": "123456789", + "location": "us-central1", + "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydatasetsample${uniqueId}", + "type": "DEFAULT" +} + +--- + +GET https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/${datasetID}?alt=json +Content-Type: application/json +User-Agent: Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "access": [ + { + "role": "WRITER", + "specialGroup": "projectWriters" + }, + { + "role": "OWNER", + "specialGroup": "projectOwners" + }, + { + "role": "OWNER", + "userByEmail": "user@google.com" + }, + { + "role": "READER", + "specialGroup": "projectReaders" + } + ], + "creationTime": "123456789", + "datasetReference": { + "datasetId": "bigquerydatasetsample${uniqueId}", + "projectId": "${projectId}" + }, + "etag": "abcdef0123A=", + "friendlyName": "bigquerydataset-sample-updated", + "id": "000000000000000000000", + "kind": "bigquery#dataset", + "labels": { + "cnrm-test": "true", + "managed-by-cnrm": "true" + }, + "lastModifiedTime": "123456789", + "location": "us-central1", + "maxTimeTravelHours": "168", + "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydatasetsample${uniqueId}", + "type": "DEFAULT" +} + +--- + +GET https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/${datasetID}?alt=json&prettyPrint=false +User-Agent: kcc/controller-manager + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "access": [ + { + "role": "WRITER", + "specialGroup": "projectWriters" + }, + { + "role": "OWNER", + "specialGroup": "projectOwners" + }, + { + "role": "OWNER", + "userByEmail": "user@google.com" + }, + { + "role": "READER", + "specialGroup": "projectReaders" + } + ], + "creationTime": "123456789", + "datasetReference": { + "datasetId": "bigquerydatasetsample${uniqueId}", + "projectId": "${projectId}" + }, + "etag": "abcdef0123A=", + "friendlyName": "bigquerydataset-sample-updated", + "id": "000000000000000000000", + "kind": "bigquery#dataset", + "labels": { + "cnrm-test": "true", + 
"managed-by-cnrm": "true" + }, + "lastModifiedTime": "123456789", + "location": "us-central1", + "maxTimeTravelHours": "168", + "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydatasetsample${uniqueId}", + "type": "DEFAULT" +} + +--- + +DELETE https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/${datasetID}?alt=json&prettyPrint=false +User-Agent: kcc/controller-manager + +204 No Content +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 \ No newline at end of file diff --git a/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/basicbigquerydataset-direct/create.yaml b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/basicbigquerydataset-direct/create.yaml new file mode 100644 index 0000000000..8598aa3435 --- /dev/null +++ b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/basicbigquerydataset-direct/create.yaml @@ -0,0 +1,23 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +apiVersion: bigquery.cnrm.cloud.google.com/v1beta1 +kind: BigQueryDataset +metadata: + name: bigquerydatasetsample${uniqueId} + annotations: + alpha.cnrm.cloud.google.com/reconciler: "direct" +spec: + friendlyName: bigquerydataset-sample + location: us-central1 diff --git a/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/basicbigquerydataset-direct/update.yaml b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/basicbigquerydataset-direct/update.yaml new file mode 100644 index 0000000000..c1e87a2805 --- /dev/null +++ b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/basicbigquerydataset-direct/update.yaml @@ -0,0 +1,23 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +apiVersion: bigquery.cnrm.cloud.google.com/v1beta1 +kind: BigQueryDataset +metadata: + name: bigquerydatasetsample${uniqueId} + annotations: + alpha.cnrm.cloud.google.com/reconciler: "direct" +spec: + friendlyName: bigquerydataset-sample-updated + location: us-central1 diff --git a/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/basicbigquerydataset/_generated_export_basicbigquerydataset.golden b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/basicbigquerydataset/_generated_export_basicbigquerydataset.golden index 95391263e2..6c64b9259f 100644 --- a/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/basicbigquerydataset/_generated_export_basicbigquerydataset.golden +++ b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/basicbigquerydataset/_generated_export_basicbigquerydataset.golden @@ -18,7 +18,7 @@ spec: - role: WRITER specialGroup: projectWriters friendlyName: bigquerydataset-sample-updated - location: US + location: us-central1 maxTimeTravelHours: "168" projectRef: external: ${projectId} diff --git a/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/basicbigquerydataset/_generated_object_basicbigquerydataset.golden.yaml b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/basicbigquerydataset/_generated_object_basicbigquerydataset.golden.yaml index 9ca8d07847..e1b26c8300 100644 --- a/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/basicbigquerydataset/_generated_object_basicbigquerydataset.golden.yaml +++ b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/basicbigquerydataset/_generated_object_basicbigquerydataset.golden.yaml @@ -14,6 +14,7 @@ metadata: namespace: ${uniqueId} spec: friendlyName: bigquerydataset-sample-updated + location: us-central1 projectRef: external: ${projectId} resourceID: bigquerydatasetsample${uniqueId} diff --git a/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/basicbigquerydataset/_http.log b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/basicbigquerydataset/_http.log index 4f53cdab6e..7db6b66ec7 100644 --- a/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/basicbigquerydataset/_http.log +++ b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/basicbigquerydataset/_http.log @@ -43,7 +43,7 @@ User-Agent: Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 t "cnrm-test": "true", "managed-by-cnrm": "true" }, - "location": "US" + "location": "us-central1" } 200 OK @@ -90,7 +90,7 @@ X-Xss-Protection: 0 "managed-by-cnrm": "true" }, "lastModifiedTime": "123456789", - "location": "US", + "location": "us-central1", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydatasetsample${uniqueId}", "type": "DEFAULT" } @@ -145,7 +145,7 @@ X-Xss-Protection: 0 "managed-by-cnrm": "true" }, "lastModifiedTime": "123456789", - "location": "US", + "location": "us-central1", "maxTimeTravelHours": "168", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydatasetsample${uniqueId}", "type": "DEFAULT" @@ -184,7 +184,7 @@ User-Agent: Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 t "cnrm-test": "true", "managed-by-cnrm": "true" }, - "location": "US", + "location": "us-central1", "maxTimeTravelHours": "168" } @@ -232,7 +232,7 @@ X-Xss-Protection: 0 "managed-by-cnrm": "true" }, 
"lastModifiedTime": "123456789", - "location": "US", + "location": "us-central1", "maxTimeTravelHours": "168", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydatasetsample${uniqueId}", "type": "DEFAULT" @@ -288,7 +288,7 @@ X-Xss-Protection: 0 "managed-by-cnrm": "true" }, "lastModifiedTime": "123456789", - "location": "US", + "location": "us-central1", "maxTimeTravelHours": "168", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydatasetsample${uniqueId}", "type": "DEFAULT" diff --git a/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/basicbigquerydataset/create.yaml b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/basicbigquerydataset/create.yaml index 94f172a61a..ad18e71f5a 100644 --- a/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/basicbigquerydataset/create.yaml +++ b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/basicbigquerydataset/create.yaml @@ -18,3 +18,4 @@ metadata: name: bigquerydatasetsample${uniqueId} spec: friendlyName: bigquerydataset-sample + location: us-central1 diff --git a/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/basicbigquerydataset/update.yaml b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/basicbigquerydataset/update.yaml index 5038f6c984..461dc1354f 100644 --- a/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/basicbigquerydataset/update.yaml +++ b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/basicbigquerydataset/update.yaml @@ -18,3 +18,4 @@ metadata: name: bigquerydatasetsample${uniqueId} spec: friendlyName: bigquerydataset-sample-updated + location: us-central1 diff --git a/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/bigquerydatasetaccessblock-direct/_generated_export_bigquerydatasetaccessblock-direct.golden b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/bigquerydatasetaccessblock-direct/_generated_export_bigquerydatasetaccessblock-direct.golden new file mode 100644 index 0000000000..778d0db239 --- /dev/null +++ b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/bigquerydatasetaccessblock-direct/_generated_export_bigquerydatasetaccessblock-direct.golden @@ -0,0 +1,25 @@ +apiVersion: bigquery.cnrm.cloud.google.com/v1beta1 +kind: BigQueryDataset +metadata: + annotations: + cnrm.cloud.google.com/delete-contents-on-destroy: "false" + labels: + cnrm-test: "true" + managed-by-cnrm: "true" + name: bigquerydataset${uniqueId} +spec: + access: + - domain: google.com + role: READER + - iamMember: allUsers + role: READER + - role: OWNER + specialGroup: projectOwners + defaultTableExpirationMs: 7200000 + description: BigQuery Dataset With Access Block v2 + friendlyName: bigquerydataset-accessblock + location: US + maxTimeTravelHours: "168" + projectRef: + external: ${projectId} + resourceID: bigquerydataset${uniqueId} \ No newline at end of file diff --git a/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/bigquerydatasetaccessblock-direct/_generated_object_bigquerydatasetaccessblock-direct.golden.yaml b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/bigquerydatasetaccessblock-direct/_generated_object_bigquerydatasetaccessblock-direct.golden.yaml new file mode 100644 index 0000000000..91d9811539 --- /dev/null +++ 
b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/bigquerydatasetaccessblock-direct/_generated_object_bigquerydatasetaccessblock-direct.golden.yaml @@ -0,0 +1,41 @@ +apiVersion: bigquery.cnrm.cloud.google.com/v1beta1 +kind: BigQueryDataset +metadata: + annotations: + alpha.cnrm.cloud.google.com/reconciler: direct + cnrm.cloud.google.com/management-conflict-prevention-policy: none + finalizers: + - cnrm.cloud.google.com/finalizer + - cnrm.cloud.google.com/deletion-defender + generation: 2 + labels: + cnrm-test: "true" + name: bigquerydataset${uniqueId} + namespace: ${uniqueId} +spec: + access: + - domain: google.com + role: READER + - iamMember: allUsers + role: READER + - role: OWNER + specialGroup: projectOwners + defaultTableExpirationMs: 7200000 + description: BigQuery Dataset With Access Block v2 + friendlyName: bigquerydataset-accessblock + location: US + projectRef: + external: ${projectId} +status: + conditions: + - lastTransitionTime: "1970-01-01T00:00:00Z" + message: The resource is up to date + reason: UpToDate + status: "True" + type: Ready + creationTime: "1970-01-01T00:00:00Z" + etag: abcdef123456 + externalRef: projects/${projectId}/locations/US/datasets/bigquerydataset${uniqueId} + lastModifiedTime: "1970-01-01T00:00:00Z" + observedGeneration: 2 + selfLink: https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydataset${uniqueId} diff --git a/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/bigquerydatasetaccessblock-direct/_http.log b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/bigquerydatasetaccessblock-direct/_http.log new file mode 100644 index 0000000000..5dec562437 --- /dev/null +++ b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/bigquerydatasetaccessblock-direct/_http.log @@ -0,0 +1,393 @@ +GET https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/${datasetID}?alt=json&prettyPrint=false +User-Agent: kcc/controller-manager + +404 Not Found +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "error": { + "code": 404, + "errors": [ + { + "domain": "global", + "message": "Not found: Dataset ${projectId}:bigquerydataset${uniqueId}", + "reason": "notFound" + } + ], + "message": "Not found: Dataset ${projectId}:bigquerydataset${uniqueId}", + "status": "NOT_FOUND" + } +} + +--- + +POST https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets?alt=json&prettyPrint=false +Content-Type: application/json +User-Agent: kcc/controller-manager + +{ + "access": [ + { + "role": "OWNER", + "specialGroup": "projectOwners" + }, + { + "domain": "google.com", + "role": "READER" + } + ], + "datasetReference": { + "datasetId": "bigquerydataset${uniqueId}" + }, + "defaultTableExpirationMs": "3600000", + "description": "BigQuery Dataset With Access Block", + "friendlyName": "bigquerydataset-accessblock", + "labels": { + "cnrm-test": "true", + "managed-by-cnrm": "true" + }, + "location": "US" +} + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "access": [ + { + "role": "OWNER", + "specialGroup": "projectOwners" + }, + { + "domain": "google.com", + "role": "READER" + } + ], + 
"creationTime": "123456789", + "datasetReference": { + "datasetId": "bigquerydataset${uniqueId}", + "projectId": "${projectId}" + }, + "defaultTableExpirationMs": "3600000", + "description": "BigQuery Dataset With Access Block", + "etag": "abcdef0123A=", + "friendlyName": "bigquerydataset-accessblock", + "id": "000000000000000000000", + "kind": "bigquery#dataset", + "labels": { + "cnrm-test": "true", + "managed-by-cnrm": "true" + }, + "lastModifiedTime": "123456789", + "location": "US", + "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydataset${uniqueId}", + "type": "DEFAULT" +} + +--- + +GET https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/${datasetID}?alt=json&prettyPrint=false +User-Agent: kcc/controller-manager + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "access": [ + { + "role": "OWNER", + "specialGroup": "projectOwners" + }, + { + "domain": "google.com", + "role": "READER" + } + ], + "creationTime": "123456789", + "datasetReference": { + "datasetId": "bigquerydataset${uniqueId}", + "projectId": "${projectId}" + }, + "defaultTableExpirationMs": "3600000", + "description": "BigQuery Dataset With Access Block", + "etag": "abcdef0123A=", + "friendlyName": "bigquerydataset-accessblock", + "id": "000000000000000000000", + "kind": "bigquery#dataset", + "labels": { + "cnrm-test": "true", + "managed-by-cnrm": "true" + }, + "lastModifiedTime": "123456789", + "location": "US", + "maxTimeTravelHours": "168", + "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydataset${uniqueId}", + "type": "DEFAULT" +} + +--- + +PUT https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/${datasetID}?alt=json&prettyPrint=false +Content-Type: application/json +User-Agent: kcc/controller-manager + +{ + "access": [ + { + "role": "OWNER", + "specialGroup": "projectOwners" + }, + { + "domain": "google.com", + "role": "READER" + }, + { + "role": "OWNER", + "specialGroup": "projectOwners" + }, + { + "iamMember": "allUsers", + "role": "READER" + }, + { + "domain": "google.com", + "role": "READER" + } + ], + "creationTime": "123456789", + "datasetReference": { + "datasetId": "bigquerydataset${uniqueId}", + "projectId": "${projectId}" + }, + "defaultTableExpirationMs": "7200000", + "description": "BigQuery Dataset With Access Block v2", + "etag": "abcdef0123A=", + "friendlyName": "bigquerydataset-accessblock", + "id": "000000000000000000000", + "kind": "bigquery#dataset", + "labels": { + "cnrm-test": "true", + "managed-by-cnrm": "true" + }, + "lastModifiedTime": "123456789", + "location": "US", + "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydataset${uniqueId}", + "type": "DEFAULT" +} + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "access": [ + { + "role": "OWNER", + "specialGroup": "projectOwners" + }, + { + "role": "OWNER", + "specialGroup": "projectOwners" + }, + { + "iamMember": "allUsers", + "role": "READER" + }, + { + "domain": "google.com", + "role": "READER" + }, + { + "domain": "google.com", + "role": "READER" + } + ], + "creationTime": "123456789", + 
"datasetReference": { + "datasetId": "bigquerydataset${uniqueId}", + "projectId": "${projectId}" + }, + "defaultTableExpirationMs": "7200000", + "description": "BigQuery Dataset With Access Block v2", + "etag": "abcdef0123A=", + "friendlyName": "bigquerydataset-accessblock", + "id": "000000000000000000000", + "kind": "bigquery#dataset", + "labels": { + "cnrm-test": "true", + "managed-by-cnrm": "true" + }, + "lastModifiedTime": "123456789", + "location": "US", + "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydataset${uniqueId}", + "type": "DEFAULT" +} + +--- + +GET https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/${datasetID}?alt=json +Content-Type: application/json +User-Agent: Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "access": [ + { + "role": "OWNER", + "specialGroup": "projectOwners" + }, + { + "role": "OWNER", + "specialGroup": "projectOwners" + }, + { + "iamMember": "allUsers", + "role": "READER" + }, + { + "domain": "google.com", + "role": "READER" + }, + { + "domain": "google.com", + "role": "READER" + } + ], + "creationTime": "123456789", + "datasetReference": { + "datasetId": "bigquerydataset${uniqueId}", + "projectId": "${projectId}" + }, + "defaultTableExpirationMs": "7200000", + "description": "BigQuery Dataset With Access Block v2", + "etag": "abcdef0123A=", + "friendlyName": "bigquerydataset-accessblock", + "id": "000000000000000000000", + "kind": "bigquery#dataset", + "labels": { + "cnrm-test": "true", + "managed-by-cnrm": "true" + }, + "lastModifiedTime": "123456789", + "location": "US", + "maxTimeTravelHours": "168", + "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydataset${uniqueId}", + "type": "DEFAULT" +} + +--- + +GET https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/${datasetID}?alt=json&prettyPrint=false +User-Agent: kcc/controller-manager + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "access": [ + { + "role": "OWNER", + "specialGroup": "projectOwners" + }, + { + "role": "OWNER", + "specialGroup": "projectOwners" + }, + { + "iamMember": "allUsers", + "role": "READER" + }, + { + "domain": "google.com", + "role": "READER" + }, + { + "domain": "google.com", + "role": "READER" + } + ], + "creationTime": "123456789", + "datasetReference": { + "datasetId": "bigquerydataset${uniqueId}", + "projectId": "${projectId}" + }, + "defaultTableExpirationMs": "7200000", + "description": "BigQuery Dataset With Access Block v2", + "etag": "abcdef0123A=", + "friendlyName": "bigquerydataset-accessblock", + "id": "000000000000000000000", + "kind": "bigquery#dataset", + "labels": { + "cnrm-test": "true", + "managed-by-cnrm": "true" + }, + "lastModifiedTime": "123456789", + "location": "US", + "maxTimeTravelHours": "168", + "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydataset${uniqueId}", + "type": "DEFAULT" +} + +--- + +DELETE 
https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/${datasetID}?alt=json&prettyPrint=false +User-Agent: kcc/controller-manager + +204 No Content +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 \ No newline at end of file diff --git a/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/bigquerydatasetaccessblock-direct/create.yaml b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/bigquerydatasetaccessblock-direct/create.yaml new file mode 100644 index 0000000000..064707f437 --- /dev/null +++ b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/bigquerydatasetaccessblock-direct/create.yaml @@ -0,0 +1,32 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +apiVersion: bigquery.cnrm.cloud.google.com/v1beta1 +kind: BigQueryDataset +metadata: + name: bigquerydataset${uniqueId} + annotations: + alpha.cnrm.cloud.google.com/reconciler: "direct" +spec: + defaultTableExpirationMs: 3600000 + description: "BigQuery Dataset With Access Block" + friendlyName: bigquerydataset-accessblock + location: US + projectRef: + external: ${projectId} + access: + - role: OWNER + specialGroup: projectOwners + - role: READER + domain: google.com diff --git a/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/bigquerydatasetaccessblock-direct/update.yaml b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/bigquerydatasetaccessblock-direct/update.yaml new file mode 100644 index 0000000000..68a8e31750 --- /dev/null +++ b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/bigquerydatasetaccessblock-direct/update.yaml @@ -0,0 +1,34 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +apiVersion: bigquery.cnrm.cloud.google.com/v1beta1 +kind: BigQueryDataset +metadata: + name: bigquerydataset${uniqueId} + annotations: + alpha.cnrm.cloud.google.com/reconciler: "direct" +spec: + defaultTableExpirationMs: 7200000 + description: "BigQuery Dataset With Access Block v2" + friendlyName: bigquerydataset-accessblock + location: US + projectRef: + external: ${projectId} + access: + - role: OWNER + specialGroup: projectOwners + - role: READER + iamMember: allUsers + - role: READER + domain: google.com \ No newline at end of file diff --git a/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/bigquerydatasetaccessblock/create.yaml b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/bigquerydatasetaccessblock/create.yaml index 12584981ac..925b8db34d 100644 --- a/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/bigquerydatasetaccessblock/create.yaml +++ b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/bigquerydatasetaccessblock/create.yaml @@ -21,6 +21,8 @@ spec: description: "BigQuery Dataset With Access Block" friendlyName: bigquerydataset-accessblock location: US + projectRef: + external: ${projectId} access: - role: OWNER specialGroup: projectOwners diff --git a/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/bigquerydatasetaccessblock/update.yaml b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/bigquerydatasetaccessblock/update.yaml index 5c6ed42223..e49cc61aae 100644 --- a/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/bigquerydatasetaccessblock/update.yaml +++ b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/bigquerydatasetaccessblock/update.yaml @@ -21,6 +21,8 @@ spec: description: "BigQuery Dataset With Access Block v2" friendlyName: bigquerydataset-accessblock location: US + projectRef: + external: ${projectId} access: - role: OWNER specialGroup: projectOwners diff --git a/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/fullybigquerydataset-direct/_generated_export_fullybigquerydataset-direct.golden b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/fullybigquerydataset-direct/_generated_export_fullybigquerydataset-direct.golden new file mode 100644 index 0000000000..0523cbdbdf --- /dev/null +++ b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/fullybigquerydataset-direct/_generated_export_fullybigquerydataset-direct.golden @@ -0,0 +1,30 @@ +apiVersion: bigquery.cnrm.cloud.google.com/v1beta1 +kind: BigQueryDataset +metadata: + annotations: + cnrm.cloud.google.com/delete-contents-on-destroy: "false" + labels: + cnrm-test: "true" + managed-by-cnrm: "true" + name: bigquerydataset${uniqueId} +spec: + access: + - domain: google.com + role: READER + - role: OWNER + specialGroup: projectOwners + - role: OWNER + userByEmail: user@google.com + defaultEncryptionConfiguration: + kmsKeyRef: + external: projects/${projectId}/locations/us/keyRings/kmskeyring-${uniqueId}/cryptoKeys/kmscryptokey-${uniqueId} + defaultPartitionExpirationMs: 3800000 + defaultTableExpirationMs: 3800000 + description: Fully Configured BigQuery Dataset updated + friendlyName: bigquerydataset-fullyconfigured-updated + location: US + maxTimeTravelHours: "96" + projectRef: + external: ${projectId} + resourceID: bigquerydataset${uniqueId} + storageBillingModel: LOGICAL \ No newline at end of file diff --git 
a/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/fullybigquerydataset-direct/_generated_object_fullybigquerydataset-direct.golden.yaml b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/fullybigquerydataset-direct/_generated_object_fullybigquerydataset-direct.golden.yaml new file mode 100644 index 0000000000..e717654a27 --- /dev/null +++ b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/fullybigquerydataset-direct/_generated_object_fullybigquerydataset-direct.golden.yaml @@ -0,0 +1,49 @@ +apiVersion: bigquery.cnrm.cloud.google.com/v1beta1 +kind: BigQueryDataset +metadata: + annotations: + alpha.cnrm.cloud.google.com/reconciler: direct + cnrm.cloud.google.com/management-conflict-prevention-policy: none + finalizers: + - cnrm.cloud.google.com/finalizer + - cnrm.cloud.google.com/deletion-defender + generation: 2 + labels: + cnrm-test: "true" + name: bigquerydataset${uniqueId} + namespace: ${uniqueId} +spec: + access: + - domain: google.com + role: READER + - role: OWNER + specialGroup: projectOwners + - role: OWNER + userByEmail: user@google.com + defaultCollation: "" + defaultEncryptionConfiguration: + kmsKeyRef: + name: kmscryptokey-${uniqueId} + defaultPartitionExpirationMs: 3800000 + defaultTableExpirationMs: 3800000 + description: Fully Configured BigQuery Dataset updated + friendlyName: bigquerydataset-fullyconfigured-updated + isCaseInsensitive: false + location: US + maxTimeTravelHours: "96" + projectRef: + external: ${projectId} + storageBillingModel: LOGICAL +status: + conditions: + - lastTransitionTime: "1970-01-01T00:00:00Z" + message: The resource is up to date + reason: UpToDate + status: "True" + type: Ready + creationTime: "1970-01-01T00:00:00Z" + etag: abcdef123456 + externalRef: projects/${projectId}/locations/US/datasets/bigquerydataset${uniqueId} + lastModifiedTime: "1970-01-01T00:00:00Z" + observedGeneration: 2 + selfLink: https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydataset${uniqueId} diff --git a/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/fullybigquerydataset-direct/_http.log b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/fullybigquerydataset-direct/_http.log new file mode 100644 index 0000000000..4e7cf5c240 --- /dev/null +++ b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/fullybigquerydataset-direct/_http.log @@ -0,0 +1,999 @@ +GET https://cloudkms.googleapis.com/v1/projects/${projectId}/locations/us/keyRings/kmskeyring-${uniqueId}?alt=json +Content-Type: application/json +User-Agent: Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +404 Not Found +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "error": { + "code": 404, + "message": "KeyRing projects/${projectId}/locations/us/keyRings/kmskeyring-${uniqueId} not found.", + "status": "NOT_FOUND" + } +} + +--- + +POST https://cloudkms.googleapis.com/v1/projects/${projectId}/locations/us/keyRings?alt=json&keyRingId=kmskeyring-${uniqueId} +Content-Type: application/json +User-Agent: Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: 
ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "createTime": "2024-04-01T12:34:56.123456Z", + "name": "projects/${projectId}/locations/us/keyRings/kmskeyring-${uniqueId}" +} + +--- + +GET https://cloudkms.googleapis.com/v1/projects/${projectId}/locations/us/keyRings/kmskeyring-${uniqueId}?alt=json +Content-Type: application/json +User-Agent: Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "createTime": "2024-04-01T12:34:56.123456Z", + "name": "projects/${projectId}/locations/us/keyRings/kmskeyring-${uniqueId}" +} + +--- + +GET https://cloudkms.googleapis.com/v1/projects/${projectId}/locations/us/keyRings/kmskeyring-${uniqueId}/cryptoKeys/kmscryptokey-${uniqueId}?alt=json +Content-Type: application/json +User-Agent: Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +404 Not Found +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "error": { + "code": 404, + "message": "CryptoKey projects/${projectId}/locations/us/keyRings/kmskeyring-${uniqueId}/cryptoKeys/kmscryptokey-${uniqueId} not found.", + "status": "NOT_FOUND" + } +} + +--- + +POST https://cloudkms.googleapis.com/v1/projects/${projectId}/locations/us/keyRings/kmskeyring-${uniqueId}/cryptoKeys?alt=json&cryptoKeyId=kmscryptokey-${uniqueId}&skipInitialVersionCreation=false +Content-Type: application/json +User-Agent: Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +{ + "labels": { + "cnrm-test": "true", + "key-one": "value-one", + "managed-by-cnrm": "true" + }, + "purpose": "ENCRYPT_DECRYPT" +} + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "createTime": "2024-04-01T12:34:56.123456Z", + "destroyScheduledDuration": "2592000s", + "labels": { + "cnrm-test": "true", + "key-one": "value-one", + "managed-by-cnrm": "true" + }, + "name": "projects/${projectId}/locations/us/keyRings/kmskeyring-${uniqueId}/cryptoKeys/kmscryptokey-${uniqueId}", + "primary": { + "algorithm": "GOOGLE_SYMMETRIC_ENCRYPTION", + "createTime": "2024-04-01T12:34:56.123456Z", + "generateTime": "2024-04-01T12:34:56.123456Z", + "name": "projects/${projectId}/locations/us/keyRings/kmskeyring-${uniqueId}/cryptoKeys/kmscryptokey-${uniqueId}/cryptoKeyVersions/1", + "protectionLevel": "SOFTWARE", + "state": "ENABLED" + }, + "purpose": "ENCRYPT_DECRYPT", + "versionTemplate": { + "algorithm": "GOOGLE_SYMMETRIC_ENCRYPTION", + "protectionLevel": "SOFTWARE" + } +} + +--- + +GET https://cloudkms.googleapis.com/v1/projects/${projectId}/locations/us/keyRings/kmskeyring-${uniqueId}/cryptoKeys/kmscryptokey-${uniqueId}?alt=json +Content-Type: application/json +User-Agent: Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +200 OK +Cache-Control: 
private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "createTime": "2024-04-01T12:34:56.123456Z", + "destroyScheduledDuration": "2592000s", + "labels": { + "cnrm-test": "true", + "key-one": "value-one", + "managed-by-cnrm": "true" + }, + "name": "projects/${projectId}/locations/us/keyRings/kmskeyring-${uniqueId}/cryptoKeys/kmscryptokey-${uniqueId}", + "primary": { + "algorithm": "GOOGLE_SYMMETRIC_ENCRYPTION", + "createTime": "2024-04-01T12:34:56.123456Z", + "generateTime": "2024-04-01T12:34:56.123456Z", + "name": "projects/${projectId}/locations/us/keyRings/kmskeyring-${uniqueId}/cryptoKeys/kmscryptokey-${uniqueId}/cryptoKeyVersions/1", + "protectionLevel": "SOFTWARE", + "state": "ENABLED" + }, + "purpose": "ENCRYPT_DECRYPT", + "versionTemplate": { + "algorithm": "GOOGLE_SYMMETRIC_ENCRYPTION", + "protectionLevel": "SOFTWARE" + } +} + +--- + +GET https://cloudkms.googleapis.com/v1/projects/${projectId}/locations/us/keyRings/kmskeyring-${uniqueId}/cryptoKeys/kmscryptokey-${uniqueId}:getIamPolicy?alt=json&options.requestedPolicyVersion=3&prettyPrint=false +User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "etag": "abcdef0123A=" +} + +--- + +POST https://cloudkms.googleapis.com/v1/projects/${projectId}/locations/us/keyRings/kmskeyring-${uniqueId}/cryptoKeys/kmscryptokey-${uniqueId}:setIamPolicy?alt=json&prettyPrint=false +Content-Type: application/json +User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +{ + "policy": { + "bindings": [ + { + "members": [ + "serviceAccount:bq-${projectNumber}@bigquery-encryption.iam.gserviceaccount.com" + ], + "role": "roles/cloudkms.cryptoKeyEncrypterDecrypter" + } + ], + "version": 3 + } +} + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "bindings": [ + { + "members": [ + "serviceAccount:bq-${projectNumber}@bigquery-encryption.iam.gserviceaccount.com" + ], + "role": "roles/cloudkms.cryptoKeyEncrypterDecrypter" + } + ], + "etag": "abcdef0123A=", + "version": 1 +} + +--- + +GET https://cloudkms.googleapis.com/v1/projects/${projectId}/locations/us/keyRings/kmskeyring-${uniqueId}/cryptoKeys/kmscryptokey-${uniqueId}:getIamPolicy?alt=json&options.requestedPolicyVersion=3&prettyPrint=false +User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "bindings": [ + { + "members": [ + "serviceAccount:bq-${projectNumber}@bigquery-encryption.iam.gserviceaccount.com" + ], + "role": "roles/cloudkms.cryptoKeyEncrypterDecrypter" + } + ], + "etag": "abcdef0123A=", + "version": 1 +} + +--- + 
+GET https://iam.googleapis.com/v1/projects/${projectId}/serviceAccounts/bigquerydataset-dep@${projectId}.iam.gserviceaccount.com?alt=json&prettyPrint=false +User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +404 Not Found +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "error": { + "code": 404, + "errors": [ + { + "domain": "global", + "message": "Unknown service account", + "reason": "notFound" + } + ], + "message": "Unknown service account", + "status": "NOT_FOUND" + } +} + +--- + +POST https://iam.googleapis.com/v1/projects/${projectId}/serviceAccounts?alt=json&prettyPrint=false +Content-Type: application/json +User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +{ + "accountId": "bigquerydataset-dep", + "serviceAccount": {} +} + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "email": "bigquerydataset-dep@${projectId}.iam.gserviceaccount.com", + "etag": "abcdef0123A=", + "name": "projects/${projectId}/serviceAccounts/bigquerydataset-dep@${projectId}.iam.gserviceaccount.com", + "oauth2ClientId": "888888888888888888888", + "projectId": "${projectId}", + "uniqueId": "111111111111111111111" +} + +--- + +GET https://iam.googleapis.com/v1/projects/${projectId}/serviceAccounts/bigquerydataset-dep@${projectId}.iam.gserviceaccount.com?alt=json&prettyPrint=false +User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "email": "bigquerydataset-dep@${projectId}.iam.gserviceaccount.com", + "etag": "abcdef0123A=", + "name": "projects/${projectId}/serviceAccounts/bigquerydataset-dep@${projectId}.iam.gserviceaccount.com", + "oauth2ClientId": "888888888888888888888", + "projectId": "${projectId}", + "uniqueId": "111111111111111111111" +} + +--- + +GET https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/${datasetID}?alt=json&prettyPrint=false +User-Agent: kcc/controller-manager + +404 Not Found +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "error": { + "code": 404, + "errors": [ + { + "domain": "global", + "message": "Not found: Dataset ${projectId}:bigquerydataset${uniqueId}", + "reason": "notFound" + } + ], + "message": "Not found: Dataset ${projectId}:bigquerydataset${uniqueId}", + "status": "NOT_FOUND" + } +} + +--- + +POST https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets?alt=json&prettyPrint=false +Content-Type: application/json +User-Agent: kcc/controller-manager + +{ + "access": [ + { + "role": "OWNER", + "specialGroup": "projectOwners" + } + ], + "datasetReference": { + "datasetId": 
"bigquerydataset${uniqueId}" + }, + "defaultCollation": "und:ci", + "defaultEncryptionConfiguration": { + "kmsKeyName": "projects/${projectId}/locations/us/keyRings/kmskeyring-${uniqueId}/cryptoKeys/kmscryptokey-${uniqueId}" + }, + "defaultPartitionExpirationMs": "3600000", + "defaultTableExpirationMs": "3600000", + "description": "Fully Configured BigQuery Dataset", + "friendlyName": "bigquerydataset-fullyconfigured", + "isCaseInsensitive": true, + "labels": { + "cnrm-test": "true", + "managed-by-cnrm": "true" + }, + "location": "US", + "maxTimeTravelHours": "72", + "storageBillingModel": "LOGICAL" +} + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "access": [ + { + "role": "OWNER", + "specialGroup": "projectOwners" + } + ], + "creationTime": "123456789", + "datasetReference": { + "datasetId": "bigquerydataset${uniqueId}", + "projectId": "${projectId}" + }, + "defaultCollation": "und:ci", + "defaultEncryptionConfiguration": { + "kmsKeyName": "projects/${projectId}/locations/us/keyRings/kmskeyring-${uniqueId}/cryptoKeys/kmscryptokey-${uniqueId}" + }, + "defaultPartitionExpirationMs": "3600000", + "defaultTableExpirationMs": "3600000", + "description": "Fully Configured BigQuery Dataset", + "etag": "abcdef0123A=", + "friendlyName": "bigquerydataset-fullyconfigured", + "id": "000000000000000000000", + "isCaseInsensitive": true, + "kind": "bigquery#dataset", + "labels": { + "cnrm-test": "true", + "managed-by-cnrm": "true" + }, + "lastModifiedTime": "123456789", + "location": "US", + "maxTimeTravelHours": "72", + "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydataset${uniqueId}", + "storageBillingModel": "LOGICAL", + "type": "DEFAULT" +} + +--- + +GET https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/${datasetID}?alt=json&prettyPrint=false +User-Agent: kcc/controller-manager + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "access": [ + { + "role": "OWNER", + "specialGroup": "projectOwners" + } + ], + "creationTime": "123456789", + "datasetReference": { + "datasetId": "bigquerydataset${uniqueId}", + "projectId": "${projectId}" + }, + "defaultCollation": "und:ci", + "defaultEncryptionConfiguration": { + "kmsKeyName": "projects/${projectId}/locations/us/keyRings/kmskeyring-${uniqueId}/cryptoKeys/kmscryptokey-${uniqueId}" + }, + "defaultPartitionExpirationMs": "3600000", + "defaultTableExpirationMs": "3600000", + "description": "Fully Configured BigQuery Dataset", + "etag": "abcdef0123A=", + "friendlyName": "bigquerydataset-fullyconfigured", + "id": "000000000000000000000", + "isCaseInsensitive": true, + "kind": "bigquery#dataset", + "labels": { + "cnrm-test": "true", + "managed-by-cnrm": "true" + }, + "lastModifiedTime": "123456789", + "location": "US", + "maxTimeTravelHours": "72", + "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydataset${uniqueId}", + "storageBillingModel": "LOGICAL", + "type": "DEFAULT" +} + +--- + +PUT https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/${datasetID}?alt=json&prettyPrint=false +Content-Type: application/json +User-Agent: kcc/controller-manager + +{ + "access": [ 
+ { + "role": "OWNER", + "specialGroup": "projectOwners" + }, + { + "role": "OWNER", + "specialGroup": "projectOwners" + }, + { + "domain": "google.com", + "role": "READER" + }, + { + "role": "OWNER", + "userByEmail": "user@google.com" + } + ], + "creationTime": "123456789", + "datasetReference": { + "datasetId": "bigquerydataset${uniqueId}", + "projectId": "${projectId}" + }, + "defaultEncryptionConfiguration": { + "kmsKeyName": "projects/${projectId}/locations/us/keyRings/kmskeyring-${uniqueId}/cryptoKeys/kmscryptokey-${uniqueId}" + }, + "defaultPartitionExpirationMs": "3800000", + "defaultTableExpirationMs": "3800000", + "description": "Fully Configured BigQuery Dataset updated", + "etag": "abcdef0123A=", + "friendlyName": "bigquerydataset-fullyconfigured-updated", + "id": "000000000000000000000", + "kind": "bigquery#dataset", + "labels": { + "cnrm-test": "true", + "managed-by-cnrm": "true" + }, + "lastModifiedTime": "123456789", + "location": "US", + "maxTimeTravelHours": "96", + "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydataset${uniqueId}", + "storageBillingModel": "LOGICAL", + "type": "DEFAULT" +} + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "access": [ + { + "role": "OWNER", + "specialGroup": "projectOwners" + }, + { + "role": "OWNER", + "specialGroup": "projectOwners" + }, + { + "role": "OWNER", + "userByEmail": "user@google.com" + }, + { + "domain": "google.com", + "role": "READER" + } + ], + "creationTime": "123456789", + "datasetReference": { + "datasetId": "bigquerydataset${uniqueId}", + "projectId": "${projectId}" + }, + "defaultEncryptionConfiguration": { + "kmsKeyName": "projects/${projectId}/locations/us/keyRings/kmskeyring-${uniqueId}/cryptoKeys/kmscryptokey-${uniqueId}" + }, + "defaultPartitionExpirationMs": "3800000", + "defaultTableExpirationMs": "3800000", + "description": "Fully Configured BigQuery Dataset updated", + "etag": "abcdef0123A=", + "friendlyName": "bigquerydataset-fullyconfigured-updated", + "id": "000000000000000000000", + "kind": "bigquery#dataset", + "labels": { + "cnrm-test": "true", + "managed-by-cnrm": "true" + }, + "lastModifiedTime": "123456789", + "location": "US", + "maxTimeTravelHours": "96", + "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydataset${uniqueId}", + "storageBillingModel": "LOGICAL", + "type": "DEFAULT" +} + +--- + +GET https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/${datasetID}?alt=json +Content-Type: application/json +User-Agent: Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "access": [ + { + "role": "OWNER", + "specialGroup": "projectOwners" + }, + { + "role": "OWNER", + "specialGroup": "projectOwners" + }, + { + "role": "OWNER", + "userByEmail": "user@google.com" + }, + { + "domain": "google.com", + "role": "READER" + } + ], + "creationTime": "123456789", + "datasetReference": { + "datasetId": "bigquerydataset${uniqueId}", + "projectId": "${projectId}" + }, + "defaultEncryptionConfiguration": { + "kmsKeyName": 
"projects/${projectId}/locations/us/keyRings/kmskeyring-${uniqueId}/cryptoKeys/kmscryptokey-${uniqueId}" + }, + "defaultPartitionExpirationMs": "3800000", + "defaultTableExpirationMs": "3800000", + "description": "Fully Configured BigQuery Dataset updated", + "etag": "abcdef0123A=", + "friendlyName": "bigquerydataset-fullyconfigured-updated", + "id": "000000000000000000000", + "kind": "bigquery#dataset", + "labels": { + "cnrm-test": "true", + "managed-by-cnrm": "true" + }, + "lastModifiedTime": "123456789", + "location": "US", + "maxTimeTravelHours": "96", + "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydataset${uniqueId}", + "storageBillingModel": "LOGICAL", + "type": "DEFAULT" +} + +--- + +GET https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/${datasetID}?alt=json&prettyPrint=false +User-Agent: kcc/controller-manager + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "access": [ + { + "role": "OWNER", + "specialGroup": "projectOwners" + }, + { + "role": "OWNER", + "specialGroup": "projectOwners" + }, + { + "role": "OWNER", + "userByEmail": "user@google.com" + }, + { + "domain": "google.com", + "role": "READER" + } + ], + "creationTime": "123456789", + "datasetReference": { + "datasetId": "bigquerydataset${uniqueId}", + "projectId": "${projectId}" + }, + "defaultEncryptionConfiguration": { + "kmsKeyName": "projects/${projectId}/locations/us/keyRings/kmskeyring-${uniqueId}/cryptoKeys/kmscryptokey-${uniqueId}" + }, + "defaultPartitionExpirationMs": "3800000", + "defaultTableExpirationMs": "3800000", + "description": "Fully Configured BigQuery Dataset updated", + "etag": "abcdef0123A=", + "friendlyName": "bigquerydataset-fullyconfigured-updated", + "id": "000000000000000000000", + "kind": "bigquery#dataset", + "labels": { + "cnrm-test": "true", + "managed-by-cnrm": "true" + }, + "lastModifiedTime": "123456789", + "location": "US", + "maxTimeTravelHours": "96", + "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydataset${uniqueId}", + "storageBillingModel": "LOGICAL", + "type": "DEFAULT" +} + +--- + +DELETE https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/${datasetID}?alt=json&prettyPrint=false +User-Agent: kcc/controller-manager + +204 No Content +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + + +--- + +GET https://iam.googleapis.com/v1/projects/${projectId}/serviceAccounts/bigquerydataset-dep@${projectId}.iam.gserviceaccount.com?alt=json&prettyPrint=false +User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "email": "bigquerydataset-dep@${projectId}.iam.gserviceaccount.com", + "etag": "abcdef0123A=", + "name": "projects/${projectId}/serviceAccounts/bigquerydataset-dep@${projectId}.iam.gserviceaccount.com", + "oauth2ClientId": "888888888888888888888", + "projectId": "${projectId}", + "uniqueId": 
"111111111111111111111" +} + +--- + +DELETE https://iam.googleapis.com/v1/projects/${projectId}/serviceAccounts/bigquerydataset-dep@${projectId}.iam.gserviceaccount.com?alt=json&prettyPrint=false +User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{} + +--- + +GET https://cloudkms.googleapis.com/v1/projects/${projectId}/locations/us/keyRings/kmskeyring-${uniqueId}/cryptoKeys/kmscryptokey-${uniqueId}:getIamPolicy?alt=json&options.requestedPolicyVersion=3&prettyPrint=false +User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "bindings": [ + { + "members": [ + "serviceAccount:bq-${projectNumber}@bigquery-encryption.iam.gserviceaccount.com" + ], + "role": "roles/cloudkms.cryptoKeyEncrypterDecrypter" + } + ], + "etag": "abcdef0123A=", + "version": 1 +} + +--- + +POST https://cloudkms.googleapis.com/v1/projects/${projectId}/locations/us/keyRings/kmskeyring-${uniqueId}/cryptoKeys/kmscryptokey-${uniqueId}:setIamPolicy?alt=json&prettyPrint=false +Content-Type: application/json +User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +{ + "policy": { + "etag": "abcdef0123A=", + "version": 3 + } +} + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "etag": "abcdef0123A=", + "version": 1 +} + +--- + +GET https://cloudkms.googleapis.com/v1/projects/${projectId}/locations/us/keyRings/kmskeyring-${uniqueId}/cryptoKeys/kmscryptokey-${uniqueId}?alt=json +Content-Type: application/json +User-Agent: Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "createTime": "2024-04-01T12:34:56.123456Z", + "destroyScheduledDuration": "2592000s", + "labels": { + "cnrm-test": "true", + "key-one": "value-one", + "managed-by-cnrm": "true" + }, + "name": "projects/${projectId}/locations/us/keyRings/kmskeyring-${uniqueId}/cryptoKeys/kmscryptokey-${uniqueId}", + "primary": { + "algorithm": "GOOGLE_SYMMETRIC_ENCRYPTION", + "createTime": "2024-04-01T12:34:56.123456Z", + "generateTime": "2024-04-01T12:34:56.123456Z", + "name": "projects/${projectId}/locations/us/keyRings/kmskeyring-${uniqueId}/cryptoKeys/kmscryptokey-${uniqueId}/cryptoKeyVersions/1", + "protectionLevel": "SOFTWARE", + "state": "ENABLED" + }, + "purpose": "ENCRYPT_DECRYPT", + "versionTemplate": { + "algorithm": "GOOGLE_SYMMETRIC_ENCRYPTION", + "protectionLevel": "SOFTWARE" + } +} + +--- + +GET 
https://cloudkms.googleapis.com/v1/projects/${projectId}/locations/us/keyRings/kmskeyring-${uniqueId}/cryptoKeys/kmscryptokey-${uniqueId}/cryptoKeyVersions?alt=json&prettyPrint=false +User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "cryptoKeyVersions": [ + { + "algorithm": "GOOGLE_SYMMETRIC_ENCRYPTION", + "createTime": "2024-04-01T12:34:56.123456Z", + "generateTime": "2024-04-01T12:34:56.123456Z", + "name": "projects/${projectId}/locations/us/keyRings/kmskeyring-${uniqueId}/cryptoKeys/kmscryptokey-${uniqueId}/cryptoKeyVersions/1", + "protectionLevel": "SOFTWARE", + "state": "ENABLED" + } + ], + "totalSize": 1 +} + +--- + +POST https://cloudkms.googleapis.com/v1/projects/${projectId}/locations/us/keyRings/kmskeyring-${uniqueId}/cryptoKeys/kmscryptokey-${uniqueId}/cryptoKeyVersions/1:destroy?alt=json&prettyPrint=false +Content-Type: application/json +User-Agent: google-api-go-client/0.5 Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +{} + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "algorithm": "GOOGLE_SYMMETRIC_ENCRYPTION", + "createTime": "2024-04-01T12:34:56.123456Z", + "destroyTime": "2024-04-01T12:34:56.123456Z", + "generateTime": "2024-04-01T12:34:56.123456Z", + "name": "projects/${projectId}/locations/us/keyRings/kmskeyring-${uniqueId}/cryptoKeys/kmscryptokey-${uniqueId}/cryptoKeyVersions/1", + "protectionLevel": "SOFTWARE", + "state": "DESTROY_SCHEDULED" +} + +--- + +GET https://cloudkms.googleapis.com/v1/projects/${projectId}/locations/us/keyRings/kmskeyring-${uniqueId}?alt=json +Content-Type: application/json +User-Agent: Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 terraform-provider-google-beta/kcc/controller-manager + +200 OK +Cache-Control: private +Content-Type: application/json; charset=UTF-8 +Server: ESF +Vary: Origin +Vary: X-Origin +Vary: Referer +X-Content-Type-Options: nosniff +X-Frame-Options: SAMEORIGIN +X-Xss-Protection: 0 + +{ + "createTime": "2024-04-01T12:34:56.123456Z", + "name": "projects/${projectId}/locations/us/keyRings/kmskeyring-${uniqueId}" +} \ No newline at end of file diff --git a/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/fullybigquerydataset-direct/create.yaml b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/fullybigquerydataset-direct/create.yaml new file mode 100644 index 0000000000..d8b4b77639 --- /dev/null +++ b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/fullybigquerydataset-direct/create.yaml @@ -0,0 +1,38 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +apiVersion: bigquery.cnrm.cloud.google.com/v1beta1 +kind: BigQueryDataset +metadata: + name: bigquerydataset${uniqueId} + annotations: + alpha.cnrm.cloud.google.com/reconciler: "direct" +spec: + description: "Fully Configured BigQuery Dataset" + friendlyName: bigquerydataset-fullyconfigured + defaultPartitionExpirationMs: 3600000 + defaultTableExpirationMs: 3600000 + defaultCollation: und:ci + defaultEncryptionConfiguration: + kmsKeyRef: + name: kmscryptokey-${uniqueId} + isCaseInsensitive: true + location: US + maxTimeTravelHours: "72" + projectRef: + external: ${projectId} + access: + - role: OWNER + specialGroup: projectOwners + storageBillingModel: LOGICAL diff --git a/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/fullybigquerydataset-direct/dependencies.yaml b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/fullybigquerydataset-direct/dependencies.yaml new file mode 100644 index 0000000000..47a1531b49 --- /dev/null +++ b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/fullybigquerydataset-direct/dependencies.yaml @@ -0,0 +1,53 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +apiVersion: kms.cnrm.cloud.google.com/v1beta1 +kind: KMSKeyRing +metadata: + name: kmskeyring-${uniqueId} +spec: + location: us +--- +apiVersion: kms.cnrm.cloud.google.com/v1beta1 +kind: KMSCryptoKey +metadata: + annotations: + cnrm.cloud.google.com/project-id: ${projectId} + labels: + key-one: value-one + name: kmscryptokey-${uniqueId} +spec: + keyRingRef: + name: kmskeyring-${uniqueId} +--- +apiVersion: iam.cnrm.cloud.google.com/v1beta1 +kind: IAMPolicy +metadata: + name: iampolicy-${uniqueId} +spec: + resourceRef: + apiVersion: kms.cnrm.cloud.google.com/v1beta1 + kind: KMSCryptoKey + name: kmscryptokey-${uniqueId} + bindings: + - role: roles/cloudkms.cryptoKeyEncrypterDecrypter + members: + - serviceAccount:bq-${projectNumber}@bigquery-encryption.iam.gserviceaccount.com +--- +apiVersion: iam.cnrm.cloud.google.com/v1beta1 +kind: IAMServiceAccount +metadata: + annotations: + cnrm.cloud.google.com/project-id: "${projectId}" + name: bigquerydataset-dep diff --git a/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/fullybigquerydataset-direct/update.yaml b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/fullybigquerydataset-direct/update.yaml new file mode 100644 index 0000000000..3fe9cae024 --- /dev/null +++ b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerydataset/fullybigquerydataset-direct/update.yaml @@ -0,0 +1,42 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +apiVersion: bigquery.cnrm.cloud.google.com/v1beta1 +kind: BigQueryDataset +metadata: + name: bigquerydataset${uniqueId} + annotations: + alpha.cnrm.cloud.google.com/reconciler: "direct" +spec: + description: "Fully Configured BigQuery Dataset updated" + friendlyName: bigquerydataset-fullyconfigured-updated + defaultPartitionExpirationMs: 3800000 + defaultTableExpirationMs: 3800000 + defaultCollation: "" + defaultEncryptionConfiguration: + kmsKeyRef: + name: kmscryptokey-${uniqueId} + isCaseInsensitive: false + location: US + maxTimeTravelHours: "96" + projectRef: + external: ${projectId} + access: + - role: OWNER + specialGroup: projectOwners + - role: READER + domain: google.com + - role: OWNER + userByEmail: bigquerydataset-dep@${projectId}.iam.gserviceaccount.com + storageBillingModel: LOGICAL \ No newline at end of file diff --git a/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigqueryjob/_vcr_cassettes/tf.yaml b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigqueryjob/_vcr_cassettes/tf.yaml index dfc500b102..10c9ffaed8 100644 --- a/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigqueryjob/_vcr_cassettes/tf.yaml +++ b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigqueryjob/_vcr_cassettes/tf.yaml @@ -15,1666 +15,1214 @@ --- version: 2 interactions: - - id: 0 - request: - proto: HTTP/1.1 - proto_major: 1 - proto_minor: 1 - content_length: 0 - transfer_encoding: [] - trailer: {} - host: bigquery.googleapis.com - remote_addr: "" - request_uri: "" - body: "" - form: {} - headers: - Content-Type: - - application/json - url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset12yq2ldf3wcoir?alt=json - method: GET - response: - proto: HTTP/2.0 - proto_major: 2 - proto_minor: 0 - transfer_encoding: [] - trailer: {} - content_length: 0 - uncompressed: true - body: fake error message - headers: - Content-Type: - - application/json; charset=UTF-8 - status: 404 Not Found - code: 404 - duration: 369.624831ms - - id: 1 - request: - proto: HTTP/1.1 - proto_major: 1 - proto_minor: 1 - content_length: 0 - transfer_encoding: [] - trailer: {} - host: bigquery.googleapis.com - remote_addr: "" - request_uri: "" - body: "" - form: {} - headers: - Content-Type: - - application/json - url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset22yq2ldf3wcoir?alt=json - method: GET - response: - proto: HTTP/2.0 - proto_major: 2 - proto_minor: 0 - transfer_encoding: [] - trailer: {} - content_length: 0 - uncompressed: true - body: fake error message - headers: - Content-Type: - - application/json; charset=UTF-8 - status: 404 Not Found - code: 404 - duration: 392.395968ms - - id: 2 - request: - proto: HTTP/1.1 - proto_major: 1 - proto_minor: 1 - content_length: 138 - transfer_encoding: [] - trailer: {} - host: bigquery.googleapis.com - remote_addr: "" - request_uri: "" - body: | - {"datasetReference":{"datasetId":"bigquerydataset12yq2ldf3wcoir"},"labels":{"cnrm-test":"true","managed-by-cnrm":"true"},"location":"US"} - form: {} - headers: - Content-Type: - - application/json - url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets?alt=json - method: POST - response: - proto: HTTP/2.0 - proto_major: 2 - proto_minor: 0 - transfer_encoding: [] - trailer: {} - content_length: -1 - uncompressed: true - body: | - { - "kind": "bigquery#dataset", - "etag": "1oxb+cE169ziQbocLgYz5w==", - "id": "example-project:bigquerydataset12yq2ldf3wcoir", - "selfLink": 
"https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset12yq2ldf3wcoir", - "datasetReference": { - "datasetId": "bigquerydataset12yq2ldf3wcoir", - "projectId": "example-project" - }, - "labels": { - "cnrm-test": "true", - "managed-by-cnrm": "true" - }, - "access": [ + - id: 0 + request: + proto: HTTP/1.1 + proto_major: 1 + proto_minor: 1 + content_length: 0 + transfer_encoding: [] + trailer: {} + host: bigquery.googleapis.com + remote_addr: "" + request_uri: "" + body: "" + form: {} + headers: + Content-Type: + - application/json + url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset22yq2ldf3wcoir?alt=json + method: GET + response: + proto: HTTP/2.0 + proto_major: 2 + proto_minor: 0 + transfer_encoding: [] + trailer: {} + content_length: 0 + uncompressed: true + body: fake error message + headers: + Content-Type: + - application/json; charset=UTF-8 + status: 404 Not Found + code: 404 + duration: 234.053167ms + - id: 1 + request: + proto: HTTP/1.1 + proto_major: 1 + proto_minor: 1 + content_length: 0 + transfer_encoding: [] + trailer: {} + host: bigquery.googleapis.com + remote_addr: "" + request_uri: "" + body: "" + form: {} + headers: + Content-Type: + - application/json + url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset12yq2ldf3wcoir?alt=json + method: GET + response: + proto: HTTP/2.0 + proto_major: 2 + proto_minor: 0 + transfer_encoding: [] + trailer: {} + content_length: 0 + uncompressed: true + body: fake error message + headers: + Content-Type: + - application/json; charset=UTF-8 + status: 404 Not Found + code: 404 + duration: 241.932587ms + - id: 2 + request: + proto: HTTP/1.1 + proto_major: 1 + proto_minor: 1 + content_length: 147 + transfer_encoding: [] + trailer: {} + host: bigquery.googleapis.com + remote_addr: "" + request_uri: "" + body: | + {"datasetReference":{"datasetId":"bigquerydataset22yq2ldf3wcoir"},"labels":{"cnrm-test":"true","managed-by-cnrm":"true"},"location":"us-central1"} + form: {} + headers: + Content-Type: + - application/json + url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets?alt=json + method: POST + response: + proto: HTTP/2.0 + proto_major: 2 + proto_minor: 0 + transfer_encoding: [] + trailer: {} + content_length: -1 + uncompressed: true + body: | { - "role": "WRITER", - "specialGroup": "projectWriters" - }, - { - "role": "OWNER", - "specialGroup": "projectOwners" - }, - { - "role": "OWNER", - "userByEmail": "integration-test@example-project.iam.gserviceaccount.com" - }, - { - "role": "READER", - "specialGroup": "projectReaders" - } - ], - "creationTime": "1714007921736", - "lastModifiedTime": "1714007921736", - "location": "US", - "type": "DEFAULT" - } - headers: - Content-Type: - - application/json; charset=UTF-8 - status: 200 OK - code: 200 - duration: 468.340052ms - - id: 3 - request: - proto: HTTP/1.1 - proto_major: 1 - proto_minor: 1 - content_length: 138 - transfer_encoding: [] - trailer: {} - host: bigquery.googleapis.com - remote_addr: "" - request_uri: "" - body: | - {"datasetReference":{"datasetId":"bigquerydataset22yq2ldf3wcoir"},"labels":{"cnrm-test":"true","managed-by-cnrm":"true"},"location":"US"} - form: {} - headers: - Content-Type: - - application/json - url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets?alt=json - method: POST - response: - proto: HTTP/2.0 - proto_major: 2 - proto_minor: 0 - transfer_encoding: [] - trailer: {} - 
content_length: -1 - uncompressed: true - body: | - { - "kind": "bigquery#dataset", - "etag": "5BIDIpkSuP2XjG2TO/A+pA==", - "id": "example-project:bigquerydataset22yq2ldf3wcoir", - "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset22yq2ldf3wcoir", - "datasetReference": { - "datasetId": "bigquerydataset22yq2ldf3wcoir", - "projectId": "example-project" - }, - "labels": { - "cnrm-test": "true", - "managed-by-cnrm": "true" - }, - "access": [ - { - "role": "WRITER", - "specialGroup": "projectWriters" - }, - { - "role": "OWNER", - "specialGroup": "projectOwners" - }, - { - "role": "OWNER", - "userByEmail": "integration-test@example-project.iam.gserviceaccount.com" - }, - { - "role": "READER", - "specialGroup": "projectReaders" - } - ], - "creationTime": "1714007921933", - "lastModifiedTime": "1714007921933", - "location": "US", - "type": "DEFAULT" - } - headers: - Content-Type: - - application/json; charset=UTF-8 - status: 200 OK - code: 200 - duration: 555.587721ms - - id: 4 - request: - proto: HTTP/1.1 - proto_major: 1 - proto_minor: 1 - content_length: 0 - transfer_encoding: [] - trailer: {} - host: bigquery.googleapis.com - remote_addr: "" - request_uri: "" - body: "" - form: {} - headers: - Content-Type: - - application/json - url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset12yq2ldf3wcoir?alt=json - method: GET - response: - proto: HTTP/2.0 - proto_major: 2 - proto_minor: 0 - transfer_encoding: [] - trailer: {} - content_length: -1 - uncompressed: true - body: | - { - "kind": "bigquery#dataset", - "etag": "1oxb+cE169ziQbocLgYz5w==", - "id": "example-project:bigquerydataset12yq2ldf3wcoir", - "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset12yq2ldf3wcoir", - "datasetReference": { - "datasetId": "bigquerydataset12yq2ldf3wcoir", - "projectId": "example-project" - }, - "labels": { - "cnrm-test": "true", - "managed-by-cnrm": "true" - }, - "access": [ - { - "role": "WRITER", - "specialGroup": "projectWriters" - }, - { - "role": "OWNER", - "specialGroup": "projectOwners" - }, - { - "role": "OWNER", - "userByEmail": "integration-test@example-project.iam.gserviceaccount.com" - }, - { - "role": "READER", - "specialGroup": "projectReaders" - } - ], - "creationTime": "1714007921736", - "lastModifiedTime": "1714007921736", - "location": "US", - "type": "DEFAULT" - } - headers: - Content-Type: - - application/json; charset=UTF-8 - status: 200 OK - code: 200 - duration: 212.337709ms - - id: 5 - request: - proto: HTTP/1.1 - proto_major: 1 - proto_minor: 1 - content_length: 0 - transfer_encoding: [] - trailer: {} - host: bigquery.googleapis.com - remote_addr: "" - request_uri: "" - body: "" - form: {} - headers: - Content-Type: - - application/json - url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset22yq2ldf3wcoir?alt=json - method: GET - response: - proto: HTTP/2.0 - proto_major: 2 - proto_minor: 0 - transfer_encoding: [] - trailer: {} - content_length: -1 - uncompressed: true - body: | - { - "kind": "bigquery#dataset", - "etag": "5BIDIpkSuP2XjG2TO/A+pA==", - "id": "example-project:bigquerydataset22yq2ldf3wcoir", - "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset22yq2ldf3wcoir", - "datasetReference": { - "datasetId": "bigquerydataset22yq2ldf3wcoir", - "projectId": "example-project" - }, - "labels": { - "cnrm-test": "true", - 
"managed-by-cnrm": "true" - }, - "access": [ - { - "role": "WRITER", - "specialGroup": "projectWriters" - }, - { - "role": "OWNER", - "specialGroup": "projectOwners" - }, - { - "role": "OWNER", - "userByEmail": "integration-test@example-project.iam.gserviceaccount.com" - }, - { - "role": "READER", - "specialGroup": "projectReaders" - } - ], - "creationTime": "1714007921933", - "lastModifiedTime": "1714007921933", - "location": "US", - "type": "DEFAULT" - } - headers: - Content-Type: - - application/json; charset=UTF-8 - status: 200 OK - code: 200 - duration: 296.156142ms - - id: 6 - request: - proto: HTTP/1.1 - proto_major: 1 - proto_minor: 1 - content_length: 0 - transfer_encoding: [] - trailer: {} - host: bigquery.googleapis.com - remote_addr: "" - request_uri: "" - body: "" - form: {} - headers: - Content-Type: - - application/json - url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset12yq2ldf3wcoir?alt=json - method: GET - response: - proto: HTTP/2.0 - proto_major: 2 - proto_minor: 0 - transfer_encoding: [] - trailer: {} - content_length: -1 - uncompressed: true - body: | - { - "kind": "bigquery#dataset", - "etag": "1oxb+cE169ziQbocLgYz5w==", - "id": "example-project:bigquerydataset12yq2ldf3wcoir", - "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset12yq2ldf3wcoir", - "datasetReference": { - "datasetId": "bigquerydataset12yq2ldf3wcoir", - "projectId": "example-project" - }, - "labels": { - "cnrm-test": "true", - "managed-by-cnrm": "true" - }, - "access": [ - { - "role": "WRITER", - "specialGroup": "projectWriters" - }, - { - "role": "OWNER", - "specialGroup": "projectOwners" - }, - { - "role": "OWNER", - "userByEmail": "integration-test@example-project.iam.gserviceaccount.com" - }, - { - "role": "READER", - "specialGroup": "projectReaders" - } - ], - "creationTime": "1714007921736", - "lastModifiedTime": "1714007921736", - "location": "US", - "type": "DEFAULT" - } - headers: - Content-Type: - - application/json; charset=UTF-8 - status: 200 OK - code: 200 - duration: 252.63049ms - - id: 7 - request: - proto: HTTP/1.1 - proto_major: 1 - proto_minor: 1 - content_length: 0 - transfer_encoding: [] - trailer: {} - host: bigquery.googleapis.com - remote_addr: "" - request_uri: "" - body: "" - form: {} - headers: - X-Goog-Api-Client: - - gl-go/1.21.5 gdcl/0.160.0 - url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset22yq2ldf3wcoir/tables/bigquerytable2yq2ldf3wcoir?alt=json&prettyPrint=false - method: GET - response: - proto: HTTP/2.0 - proto_major: 2 - proto_minor: 0 - transfer_encoding: [] - trailer: {} - content_length: 0 - uncompressed: true - body: fake error message - headers: - Content-Type: - - application/json; charset=UTF-8 - status: 404 Not Found - code: 404 - duration: 201.043185ms - - id: 8 - request: - proto: HTTP/1.1 - proto_major: 1 - proto_minor: 1 - content_length: 0 - transfer_encoding: [] - trailer: {} - host: bigquery.googleapis.com - remote_addr: "" - request_uri: "" - body: "" - form: {} - headers: - Content-Type: - - application/json - url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset22yq2ldf3wcoir?alt=json - method: GET - response: - proto: HTTP/2.0 - proto_major: 2 - proto_minor: 0 - transfer_encoding: [] - trailer: {} - content_length: -1 - uncompressed: true - body: | - { - "kind": "bigquery#dataset", - "etag": "5BIDIpkSuP2XjG2TO/A+pA==", - "id": 
"example-project:bigquerydataset22yq2ldf3wcoir", - "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset22yq2ldf3wcoir", - "datasetReference": { - "datasetId": "bigquerydataset22yq2ldf3wcoir", - "projectId": "example-project" - }, - "labels": { - "cnrm-test": "true", - "managed-by-cnrm": "true" - }, - "access": [ - { - "role": "WRITER", - "specialGroup": "projectWriters" - }, - { - "role": "OWNER", - "specialGroup": "projectOwners" - }, - { - "role": "OWNER", - "userByEmail": "integration-test@example-project.iam.gserviceaccount.com" - }, - { - "role": "READER", - "specialGroup": "projectReaders" - } - ], - "creationTime": "1714007921933", - "lastModifiedTime": "1714007921933", - "location": "US", - "type": "DEFAULT" - } - headers: - Content-Type: - - application/json; charset=UTF-8 - status: 200 OK - code: 200 - duration: 279.454008ms - - id: 9 - request: - proto: HTTP/1.1 - proto_major: 1 - proto_minor: 1 - content_length: 200 - transfer_encoding: [] - trailer: {} - host: bigquery.googleapis.com - remote_addr: "" - request_uri: "" - body: | - {"labels":{"cnrm-test":"true","managed-by-cnrm":"true"},"tableReference":{"datasetId":"bigquerydataset22yq2ldf3wcoir","projectId":"example-project","tableId":"bigquerytable2yq2ldf3wcoir"}} - form: {} - headers: - Content-Type: - - application/json - X-Goog-Api-Client: - - gl-go/1.21.5 gdcl/0.160.0 - url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset22yq2ldf3wcoir/tables?alt=json&prettyPrint=false - method: POST - response: - proto: HTTP/2.0 - proto_major: 2 - proto_minor: 0 - transfer_encoding: [] - trailer: {} - content_length: -1 - uncompressed: true - body: '{"kind":"bigquery#table","etag":"5tjsrJs4nIXqBqyB5xL59w==","id":"example-project:bigquerydataset22yq2ldf3wcoir.bigquerytable2yq2ldf3wcoir","selfLink":"https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset22yq2ldf3wcoir/tables/bigquerytable2yq2ldf3wcoir","tableReference":{"projectId":"example-project","datasetId":"bigquerydataset22yq2ldf3wcoir","tableId":"bigquerytable2yq2ldf3wcoir"},"labels":{"cnrm-test":"true","managed-by-cnrm":"true"},"schema":{},"numBytes":"0","numLongTermBytes":"0","numRows":"0","creationTime":"1714007923844","lastModifiedTime":"1714007923961","type":"TABLE","location":"US","numTotalLogicalBytes":"0","numActiveLogicalBytes":"0","numLongTermLogicalBytes":"0"}' - headers: - Content-Type: - - application/json; charset=UTF-8 - status: 200 OK - code: 200 - duration: 327.577094ms - - id: 10 - request: - proto: HTTP/1.1 - proto_major: 1 - proto_minor: 1 - content_length: 0 - transfer_encoding: [] - trailer: {} - host: bigquery.googleapis.com - remote_addr: "" - request_uri: "" - body: "" - form: {} - headers: - X-Goog-Api-Client: - - gl-go/1.21.5 gdcl/0.160.0 - url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset22yq2ldf3wcoir/tables/bigquerytable2yq2ldf3wcoir?alt=json&prettyPrint=false - method: GET - response: - proto: HTTP/2.0 - proto_major: 2 - proto_minor: 0 - transfer_encoding: [] - trailer: {} - content_length: -1 - uncompressed: true - body: 
'{"kind":"bigquery#table","etag":"5tjsrJs4nIXqBqyB5xL59w==","id":"example-project:bigquerydataset22yq2ldf3wcoir.bigquerytable2yq2ldf3wcoir","selfLink":"https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset22yq2ldf3wcoir/tables/bigquerytable2yq2ldf3wcoir","tableReference":{"projectId":"example-project","datasetId":"bigquerydataset22yq2ldf3wcoir","tableId":"bigquerytable2yq2ldf3wcoir"},"labels":{"cnrm-test":"true","managed-by-cnrm":"true"},"numBytes":"0","numLongTermBytes":"0","numRows":"0","creationTime":"1714007923844","lastModifiedTime":"1714007923961","type":"TABLE","location":"US","numTotalLogicalBytes":"0","numActiveLogicalBytes":"0","numLongTermLogicalBytes":"0"}' - headers: - Content-Type: - - application/json; charset=UTF-8 - status: 200 OK - code: 200 - duration: 294.786438ms - - id: 11 - request: - proto: HTTP/1.1 - proto_major: 1 - proto_minor: 1 - content_length: 0 - transfer_encoding: [] - trailer: {} - host: bigquery.googleapis.com - remote_addr: "" - request_uri: "" - body: "" - form: {} - headers: - Content-Type: - - application/json - url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/jobs/bigqueryjob-2yq2ldf3wcoir?alt=json&location=US - method: GET - response: - proto: HTTP/2.0 - proto_major: 2 - proto_minor: 0 - transfer_encoding: [] - trailer: {} - content_length: 0 - uncompressed: true - body: fake error message - headers: - Content-Type: - - application/json; charset=UTF-8 - status: 404 Not Found - code: 404 - duration: 159.509962ms - - id: 12 - request: - proto: HTTP/1.1 - proto_major: 1 - proto_minor: 1 - content_length: 0 - transfer_encoding: [] - trailer: {} - host: bigquery.googleapis.com - remote_addr: "" - request_uri: "" - body: "" - form: {} - headers: - X-Goog-Api-Client: - - gl-go/1.21.5 gdcl/0.160.0 - url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset22yq2ldf3wcoir/tables/bigquerytable2yq2ldf3wcoir?alt=json&prettyPrint=false - method: GET - response: - proto: HTTP/2.0 - proto_major: 2 - proto_minor: 0 - transfer_encoding: [] - trailer: {} - content_length: -1 - uncompressed: true - body: '{"kind":"bigquery#table","etag":"5tjsrJs4nIXqBqyB5xL59w==","id":"example-project:bigquerydataset22yq2ldf3wcoir.bigquerytable2yq2ldf3wcoir","selfLink":"https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset22yq2ldf3wcoir/tables/bigquerytable2yq2ldf3wcoir","tableReference":{"projectId":"example-project","datasetId":"bigquerydataset22yq2ldf3wcoir","tableId":"bigquerytable2yq2ldf3wcoir"},"labels":{"cnrm-test":"true","managed-by-cnrm":"true"},"numBytes":"0","numLongTermBytes":"0","numRows":"0","creationTime":"1714007923844","lastModifiedTime":"1714007923961","type":"TABLE","location":"US","numTotalLogicalBytes":"0","numActiveLogicalBytes":"0","numLongTermLogicalBytes":"0"}' - headers: - Content-Type: - - application/json; charset=UTF-8 - status: 200 OK - code: 200 - duration: 242.697825ms - - id: 13 - request: - proto: HTTP/1.1 - proto_major: 1 - proto_minor: 1 - content_length: 887 - transfer_encoding: [] - trailer: {} - host: bigquery.googleapis.com - remote_addr: "" - request_uri: "" - body: | - 
{"configuration":{"jobTimeoutMs":"600000","labels":{"cnrm-test":"true","label-one":"value-one","managed-by-cnrm":"true"},"query":{"allowLargeResults":true,"createDisposition":"CREATE_NEVER","defaultDataset":{"datasetId":"bigquerydataset12yq2ldf3wcoir","projectId":"example-project"},"destinationTable":{"datasetId":"bigquerydataset22yq2ldf3wcoir","projectId":"example-project","tableId":"bigquerytable2yq2ldf3wcoir"},"flattenResults":true,"priority":"INTERACTIVE","query":"SELECT state FROM [lookerdata:cdc.project_tycho_reports]","schemaUpdateOptions":["ALLOW_FIELD_ADDITION","ALLOW_FIELD_RELAXATION"],"scriptOptions":{"keyResultStatement":"LAST","statementTimeoutMs":"300000"},"useLegacySql":true,"useQueryCache":true,"writeDisposition":"WRITE_APPEND"}},"jobReference":{"jobId":"bigqueryjob-2yq2ldf3wcoir","location":"US","project":"example-project"}} - form: {} - headers: - Content-Type: - - application/json - url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/jobs?alt=json - method: POST - response: - proto: HTTP/2.0 - proto_major: 2 - proto_minor: 0 - transfer_encoding: [] - trailer: {} - content_length: -1 - uncompressed: true - body: | - { - "kind": "bigquery#job", - "etag": "2HQmN+wdbuo22yXteShF0Q==", - "id": "example-project:US.bigqueryjob-2yq2ldf3wcoir", - "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/example-project/jobs/bigqueryjob-2yq2ldf3wcoir?location=US", - "user_email": "integration-test@example-project.iam.gserviceaccount.com", - "configuration": { - "query": { - "query": "SELECT state FROM [lookerdata:cdc.project_tycho_reports]", - "destinationTable": { - "projectId": "example-project", + "kind": "bigquery#dataset", + "etag": "UTD7bH0f//NO3dLHXzmVTQ==", + "id": "example-project:bigquerydataset22yq2ldf3wcoir", + "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset22yq2ldf3wcoir", + "datasetReference": { "datasetId": "bigquerydataset22yq2ldf3wcoir", - "tableId": "bigquerytable2yq2ldf3wcoir" - }, - "createDisposition": "CREATE_NEVER", - "writeDisposition": "WRITE_APPEND", - "defaultDataset": { - "datasetId": "bigquerydataset12yq2ldf3wcoir", "projectId": "example-project" }, - "priority": "INTERACTIVE", - "allowLargeResults": true, - "useQueryCache": true, - "flattenResults": true, - "useLegacySql": true, - "schemaUpdateOptions": [ - "ALLOW_FIELD_ADDITION", - "ALLOW_FIELD_RELAXATION" + "labels": { + "cnrm-test": "true", + "managed-by-cnrm": "true" + }, + "access": [ + { + "role": "WRITER", + "specialGroup": "projectWriters" + }, + { + "role": "OWNER", + "specialGroup": "projectOwners" + }, + { + "role": "OWNER", + "userByEmail": "xiaoweim@google.com" + }, + { + "role": "READER", + "specialGroup": "projectReaders" + } ], - "scriptOptions": { - "statementTimeoutMs": "300000", - "keyResultStatement": "LAST" - } - }, - "jobTimeoutMs": "600000", - "labels": { - "cnrm-test": "true", - "label-one": "value-one", - "managed-by-cnrm": "true" - }, - "jobType": "QUERY" - }, - "jobReference": { - "projectId": "example-project", - "jobId": "bigqueryjob-2yq2ldf3wcoir", - "location": "US" - }, - "statistics": { - "creationTime": "1714007925436", - "startTime": "1714007926032", - "query": { - "statementType": "SELECT" + "creationTime": "1729233764136", + "lastModifiedTime": "1729233764136", + "location": "us-central1", + "type": "DEFAULT" } - }, - "status": { - "state": "RUNNING" - }, - "principal_subject": "serviceAccount:integration-test@example-project.iam.gserviceaccount.com", - "jobCreationReason": { - 
"code": "REQUESTED" - } - } - headers: - Content-Type: - - application/json; charset=UTF-8 - status: 200 OK - code: 200 - duration: 777.691638ms - - id: 14 - request: - proto: HTTP/1.1 - proto_major: 1 - proto_minor: 1 - content_length: 0 - transfer_encoding: [] - trailer: {} - host: bigquery.googleapis.com - remote_addr: "" - request_uri: "" - body: "" - form: {} - headers: - Content-Type: - - application/json - url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/jobs/bigqueryjob-2yq2ldf3wcoir?alt=json&location=US - method: GET - response: - proto: HTTP/2.0 - proto_major: 2 - proto_minor: 0 - transfer_encoding: [] - trailer: {} - content_length: -1 - uncompressed: true - body: | - { - "kind": "bigquery#job", - "etag": "2HQmN+wdbuo22yXteShF0Q==", - "id": "example-project:US.bigqueryjob-2yq2ldf3wcoir", - "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/example-project/jobs/bigqueryjob-2yq2ldf3wcoir?location=US", - "user_email": "integration-test@example-project.iam.gserviceaccount.com", - "configuration": { - "query": { - "query": "SELECT state FROM [lookerdata:cdc.project_tycho_reports]", - "destinationTable": { - "projectId": "example-project", - "datasetId": "bigquerydataset22yq2ldf3wcoir", - "tableId": "bigquerytable2yq2ldf3wcoir" - }, - "createDisposition": "CREATE_NEVER", - "writeDisposition": "WRITE_APPEND", - "defaultDataset": { + headers: + Content-Type: + - application/json; charset=UTF-8 + status: 200 OK + code: 200 + duration: 361.322656ms + - id: 3 + request: + proto: HTTP/1.1 + proto_major: 1 + proto_minor: 1 + content_length: 147 + transfer_encoding: [] + trailer: {} + host: bigquery.googleapis.com + remote_addr: "" + request_uri: "" + body: | + {"datasetReference":{"datasetId":"bigquerydataset12yq2ldf3wcoir"},"labels":{"cnrm-test":"true","managed-by-cnrm":"true"},"location":"us-central1"} + form: {} + headers: + Content-Type: + - application/json + url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets?alt=json + method: POST + response: + proto: HTTP/2.0 + proto_major: 2 + proto_minor: 0 + transfer_encoding: [] + trailer: {} + content_length: -1 + uncompressed: true + body: | + { + "kind": "bigquery#dataset", + "etag": "JfHG5KJ6x0vqD5TJAHK4ag==", + "id": "example-project:bigquerydataset12yq2ldf3wcoir", + "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset12yq2ldf3wcoir", + "datasetReference": { "datasetId": "bigquerydataset12yq2ldf3wcoir", "projectId": "example-project" }, - "priority": "INTERACTIVE", - "allowLargeResults": true, - "useQueryCache": true, - "flattenResults": true, - "useLegacySql": true, - "schemaUpdateOptions": [ - "ALLOW_FIELD_ADDITION", - "ALLOW_FIELD_RELAXATION" + "labels": { + "cnrm-test": "true", + "managed-by-cnrm": "true" + }, + "access": [ + { + "role": "WRITER", + "specialGroup": "projectWriters" + }, + { + "role": "OWNER", + "specialGroup": "projectOwners" + }, + { + "role": "OWNER", + "userByEmail": "xiaoweim@google.com" + }, + { + "role": "READER", + "specialGroup": "projectReaders" + } ], - "scriptOptions": { - "statementTimeoutMs": "300000", - "keyResultStatement": "LAST" - } - }, - "jobTimeoutMs": "600000", - "labels": { - "cnrm-test": "true", - "label-one": "value-one", - "managed-by-cnrm": "true" - }, - "jobType": "QUERY" - }, - "jobReference": { - "projectId": "example-project", - "jobId": "bigqueryjob-2yq2ldf3wcoir", - "location": "US" - }, - "statistics": { - "creationTime": "1714007925436", - "startTime": 
"1714007926032", - "query": { - "statementType": "SELECT" + "creationTime": "1729233764153", + "lastModifiedTime": "1729233764153", + "location": "us-central1", + "type": "DEFAULT" } - }, - "status": { - "state": "RUNNING" - }, - "principal_subject": "serviceAccount:integration-test@example-project.iam.gserviceaccount.com", - "jobCreationReason": { - "code": "REQUESTED" - } - } - headers: - Content-Type: - - application/json; charset=UTF-8 - status: 200 OK - code: 200 - duration: 142.036947ms - - id: 15 - request: - proto: HTTP/1.1 - proto_major: 1 - proto_minor: 1 - content_length: 0 - transfer_encoding: [] - trailer: {} - host: bigquery.googleapis.com - remote_addr: "" - request_uri: "" - body: "" - form: {} - headers: - Content-Type: - - application/json - url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/jobs/bigqueryjob-2yq2ldf3wcoir?alt=json&location=US - method: GET - response: - proto: HTTP/2.0 - proto_major: 2 - proto_minor: 0 - transfer_encoding: [] - trailer: {} - content_length: -1 - uncompressed: true - body: | - { - "kind": "bigquery#job", - "etag": "NJVLiQ1/htz+e/SOteT2FQ==", - "id": "example-project:US.bigqueryjob-2yq2ldf3wcoir", - "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/example-project/jobs/bigqueryjob-2yq2ldf3wcoir?location=US", - "user_email": "integration-test@example-project.iam.gserviceaccount.com", - "configuration": { - "query": { - "query": "SELECT state FROM [lookerdata:cdc.project_tycho_reports]", - "destinationTable": { - "projectId": "example-project", - "datasetId": "bigquerydataset22yq2ldf3wcoir", - "tableId": "bigquerytable2yq2ldf3wcoir" - }, - "createDisposition": "CREATE_NEVER", - "writeDisposition": "WRITE_APPEND", - "defaultDataset": { + headers: + Content-Type: + - application/json; charset=UTF-8 + status: 200 OK + code: 200 + duration: 356.365747ms + - id: 4 + request: + proto: HTTP/1.1 + proto_major: 1 + proto_minor: 1 + content_length: 0 + transfer_encoding: [] + trailer: {} + host: bigquery.googleapis.com + remote_addr: "" + request_uri: "" + body: "" + form: {} + headers: + Content-Type: + - application/json + url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset12yq2ldf3wcoir?alt=json + method: GET + response: + proto: HTTP/2.0 + proto_major: 2 + proto_minor: 0 + transfer_encoding: [] + trailer: {} + content_length: -1 + uncompressed: true + body: | + { + "kind": "bigquery#dataset", + "etag": "JfHG5KJ6x0vqD5TJAHK4ag==", + "id": "example-project:bigquerydataset12yq2ldf3wcoir", + "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset12yq2ldf3wcoir", + "datasetReference": { "datasetId": "bigquerydataset12yq2ldf3wcoir", "projectId": "example-project" }, - "priority": "INTERACTIVE", - "allowLargeResults": true, - "useQueryCache": true, - "flattenResults": true, - "useLegacySql": true, - "schemaUpdateOptions": [ - "ALLOW_FIELD_ADDITION", - "ALLOW_FIELD_RELAXATION" - ], - "scriptOptions": { - "statementTimeoutMs": "300000", - "keyResultStatement": "LAST" - } - }, - "jobTimeoutMs": "600000", - "labels": { - "cnrm-test": "true", - "label-one": "value-one", - "managed-by-cnrm": "true" - }, - "jobType": "QUERY" - }, - "jobReference": { - "projectId": "example-project", - "jobId": "bigqueryjob-2yq2ldf3wcoir", - "location": "US" - }, - "statistics": { - "creationTime": "1714007925436", - "startTime": "1714007926032", - "query": { - "queryPlan": [ + "labels": { + "cnrm-test": "true", + "managed-by-cnrm": "true" + }, 
+ "access": [ { - "name": "S00: Output", - "id": "0", - "startMs": "1714007926424", - "waitMsAvg": "0", - "waitMsMax": "0", - "readMsAvg": "0", - "readMsMax": "0", - "computeMsAvg": "0", - "computeMsMax": "0", - "writeMsAvg": "0", - "writeMsMax": "0", - "shuffleOutputBytes": "0", - "shuffleOutputBytesSpilled": "0", - "recordsRead": "0", - "recordsWritten": "0", - "parallelInputs": "1", - "completedParallelInputs": "0", - "status": "RUNNING", - "steps": [ - { - "kind": "READ", - "substeps": [ - "state", - "FROM lookerdata:cdc.project_tycho_reports AS lookerdata:cdc.project_tycho_reports" - ] - }, - { - "kind": "WRITE", - "substeps": [ - "state", - "TO __stage00_output" - ] - } - ], - "slotMs": "0", - "computeMode": "BIGQUERY" + "role": "WRITER", + "specialGroup": "projectWriters" + }, + { + "role": "OWNER", + "specialGroup": "projectOwners" + }, + { + "role": "OWNER", + "userByEmail": "xiaoweim@google.com" + }, + { + "role": "READER", + "specialGroup": "projectReaders" } ], - "estimatedBytesProcessed": "3037868", - "timeline": [ + "creationTime": "1729233764153", + "lastModifiedTime": "1729233764153", + "location": "us-central1", + "type": "DEFAULT", + "maxTimeTravelHours": "168" + } + headers: + Content-Type: + - application/json; charset=UTF-8 + status: 200 OK + code: 200 + duration: 142.666316ms + - id: 5 + request: + proto: HTTP/1.1 + proto_major: 1 + proto_minor: 1 + content_length: 0 + transfer_encoding: [] + trailer: {} + host: bigquery.googleapis.com + remote_addr: "" + request_uri: "" + body: "" + form: {} + headers: + Content-Type: + - application/json + url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset22yq2ldf3wcoir?alt=json + method: GET + response: + proto: HTTP/2.0 + proto_major: 2 + proto_minor: 0 + transfer_encoding: [] + trailer: {} + content_length: -1 + uncompressed: true + body: | + { + "kind": "bigquery#dataset", + "etag": "UTD7bH0f//NO3dLHXzmVTQ==", + "id": "example-project:bigquerydataset22yq2ldf3wcoir", + "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset22yq2ldf3wcoir", + "datasetReference": { + "datasetId": "bigquerydataset22yq2ldf3wcoir", + "projectId": "example-project" + }, + "labels": { + "cnrm-test": "true", + "managed-by-cnrm": "true" + }, + "access": [ + { + "role": "WRITER", + "specialGroup": "projectWriters" + }, + { + "role": "OWNER", + "specialGroup": "projectOwners" + }, { - "elapsedMs": "892", - "totalSlotMs": "107", - "pendingUnits": "1", - "completedUnits": "0", - "activeUnits": "1", - "estimatedRunnableUnits": "0" + "role": "OWNER", + "userByEmail": "xiaoweim@google.com" + }, + { + "role": "READER", + "specialGroup": "projectReaders" } ], - "totalSlotMs": "107", - "statementType": "SELECT" - }, - "totalSlotMs": "107" - }, - "status": { - "state": "RUNNING" - }, - "principal_subject": "serviceAccount:integration-test@example-project.iam.gserviceaccount.com", - "jobCreationReason": { - "code": "REQUESTED" - } - } - headers: - Content-Type: - - application/json; charset=UTF-8 - status: 200 OK - code: 200 - duration: 165.695217ms - - id: 16 - request: - proto: HTTP/1.1 - proto_major: 1 - proto_minor: 1 - content_length: 0 - transfer_encoding: [] - trailer: {} - host: bigquery.googleapis.com - remote_addr: "" - request_uri: "" - body: "" - form: {} - headers: - Content-Type: - - application/json - url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/jobs/bigqueryjob-2yq2ldf3wcoir?alt=json&location=US - method: GET - response: - 
proto: HTTP/2.0 - proto_major: 2 - proto_minor: 0 - transfer_encoding: [] - trailer: {} - content_length: -1 - uncompressed: true - body: | - { - "kind": "bigquery#job", - "etag": "NJVLiQ1/htz+e/SOteT2FQ==", - "id": "example-project:US.bigqueryjob-2yq2ldf3wcoir", - "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/example-project/jobs/bigqueryjob-2yq2ldf3wcoir?location=US", - "user_email": "integration-test@example-project.iam.gserviceaccount.com", - "configuration": { - "query": { - "query": "SELECT state FROM [lookerdata:cdc.project_tycho_reports]", - "destinationTable": { - "projectId": "example-project", + "creationTime": "1729233764136", + "lastModifiedTime": "1729233764136", + "location": "us-central1", + "type": "DEFAULT", + "maxTimeTravelHours": "168" + } + headers: + Content-Type: + - application/json; charset=UTF-8 + status: 200 OK + code: 200 + duration: 146.095736ms + - id: 6 + request: + proto: HTTP/1.1 + proto_major: 1 + proto_minor: 1 + content_length: 0 + transfer_encoding: [] + trailer: {} + host: bigquery.googleapis.com + remote_addr: "" + request_uri: "" + body: "" + form: {} + headers: + Content-Type: + - application/json + url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset22yq2ldf3wcoir?alt=json + method: GET + response: + proto: HTTP/2.0 + proto_major: 2 + proto_minor: 0 + transfer_encoding: [] + trailer: {} + content_length: -1 + uncompressed: true + body: | + { + "kind": "bigquery#dataset", + "etag": "UTD7bH0f//NO3dLHXzmVTQ==", + "id": "example-project:bigquerydataset22yq2ldf3wcoir", + "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset22yq2ldf3wcoir", + "datasetReference": { "datasetId": "bigquerydataset22yq2ldf3wcoir", - "tableId": "bigquerytable2yq2ldf3wcoir" - }, - "createDisposition": "CREATE_NEVER", - "writeDisposition": "WRITE_APPEND", - "defaultDataset": { - "datasetId": "bigquerydataset12yq2ldf3wcoir", "projectId": "example-project" }, - "priority": "INTERACTIVE", - "allowLargeResults": true, - "useQueryCache": true, - "flattenResults": true, - "useLegacySql": true, - "schemaUpdateOptions": [ - "ALLOW_FIELD_ADDITION", - "ALLOW_FIELD_RELAXATION" - ], - "scriptOptions": { - "statementTimeoutMs": "300000", - "keyResultStatement": "LAST" - } - }, - "jobTimeoutMs": "600000", - "labels": { - "cnrm-test": "true", - "label-one": "value-one", - "managed-by-cnrm": "true" - }, - "jobType": "QUERY" - }, - "jobReference": { - "projectId": "example-project", - "jobId": "bigqueryjob-2yq2ldf3wcoir", - "location": "US" - }, - "statistics": { - "creationTime": "1714007925436", - "startTime": "1714007926032", - "query": { - "queryPlan": [ + "labels": { + "cnrm-test": "true", + "managed-by-cnrm": "true" + }, + "access": [ { - "name": "S00: Output", - "id": "0", - "startMs": "1714007926424", - "waitMsAvg": "0", - "waitMsMax": "0", - "readMsAvg": "0", - "readMsMax": "0", - "computeMsAvg": "0", - "computeMsMax": "0", - "writeMsAvg": "0", - "writeMsMax": "0", - "shuffleOutputBytes": "0", - "shuffleOutputBytesSpilled": "0", - "recordsRead": "0", - "recordsWritten": "0", - "parallelInputs": "1", - "completedParallelInputs": "0", - "status": "RUNNING", - "steps": [ - { - "kind": "READ", - "substeps": [ - "state", - "FROM lookerdata:cdc.project_tycho_reports AS lookerdata:cdc.project_tycho_reports" - ] - }, - { - "kind": "WRITE", - "substeps": [ - "state", - "TO __stage00_output" - ] - } - ], - "slotMs": "0", - "computeMode": "BIGQUERY" + "role": "WRITER", + 
"specialGroup": "projectWriters" + }, + { + "role": "OWNER", + "specialGroup": "projectOwners" + }, + { + "role": "OWNER", + "userByEmail": "xiaoweim@google.com" + }, + { + "role": "READER", + "specialGroup": "projectReaders" } ], - "estimatedBytesProcessed": "3037868", - "timeline": [ + "creationTime": "1729233764136", + "lastModifiedTime": "1729233764136", + "location": "us-central1", + "type": "DEFAULT", + "maxTimeTravelHours": "168" + } + headers: + Content-Type: + - application/json; charset=UTF-8 + status: 200 OK + code: 200 + duration: 139.543417ms + - id: 7 + request: + proto: HTTP/1.1 + proto_major: 1 + proto_minor: 1 + content_length: 0 + transfer_encoding: [] + trailer: {} + host: bigquery.googleapis.com + remote_addr: "" + request_uri: "" + body: "" + form: {} + headers: + X-Goog-Api-Client: + - gl-go/1.23.0--20240626-RC01 cl/646990413 +5a18e79687 X:fieldtrack,boringcrypto gdcl/0.196.0 + url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset22yq2ldf3wcoir/tables/bigquerytable2yq2ldf3wcoir?alt=json&prettyPrint=false + method: GET + response: + proto: HTTP/2.0 + proto_major: 2 + proto_minor: 0 + transfer_encoding: [] + trailer: {} + content_length: 0 + uncompressed: true + body: fake error message + headers: + Content-Type: + - application/json; charset=UTF-8 + status: 404 Not Found + code: 404 + duration: 135.950427ms + - id: 8 + request: + proto: HTTP/1.1 + proto_major: 1 + proto_minor: 1 + content_length: 0 + transfer_encoding: [] + trailer: {} + host: bigquery.googleapis.com + remote_addr: "" + request_uri: "" + body: "" + form: {} + headers: + Content-Type: + - application/json + url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset12yq2ldf3wcoir?alt=json + method: GET + response: + proto: HTTP/2.0 + proto_major: 2 + proto_minor: 0 + transfer_encoding: [] + trailer: {} + content_length: -1 + uncompressed: true + body: | + { + "kind": "bigquery#dataset", + "etag": "JfHG5KJ6x0vqD5TJAHK4ag==", + "id": "example-project:bigquerydataset12yq2ldf3wcoir", + "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset12yq2ldf3wcoir", + "datasetReference": { + "datasetId": "bigquerydataset12yq2ldf3wcoir", + "projectId": "example-project" + }, + "labels": { + "cnrm-test": "true", + "managed-by-cnrm": "true" + }, + "access": [ + { + "role": "WRITER", + "specialGroup": "projectWriters" + }, + { + "role": "OWNER", + "specialGroup": "projectOwners" + }, + { + "role": "OWNER", + "userByEmail": "xiaoweim@google.com" + }, { - "elapsedMs": "892", - "totalSlotMs": "107", - "pendingUnits": "1", - "completedUnits": "0", - "activeUnits": "1", - "estimatedRunnableUnits": "0" + "role": "READER", + "specialGroup": "projectReaders" } ], - "totalSlotMs": "107", - "statementType": "SELECT" - }, - "totalSlotMs": "107" - }, - "status": { - "state": "RUNNING" - }, - "principal_subject": "serviceAccount:integration-test@example-project.iam.gserviceaccount.com", - "jobCreationReason": { - "code": "REQUESTED" - } - } - headers: - Content-Type: - - application/json; charset=UTF-8 - status: 200 OK - code: 200 - duration: 147.020072ms - - id: 17 - request: - proto: HTTP/1.1 - proto_major: 1 - proto_minor: 1 - content_length: 0 - transfer_encoding: [] - trailer: {} - host: bigquery.googleapis.com - remote_addr: "" - request_uri: "" - body: "" - form: {} - headers: - Content-Type: - - application/json - url: 
https://bigquery.googleapis.com/bigquery/v2/projects/example-project/jobs/bigqueryjob-2yq2ldf3wcoir?alt=json&location=US - method: GET - response: - proto: HTTP/2.0 - proto_major: 2 - proto_minor: 0 - transfer_encoding: [] - trailer: {} - content_length: -1 - uncompressed: true - body: | - { - "kind": "bigquery#job", - "etag": "NJVLiQ1/htz+e/SOteT2FQ==", - "id": "example-project:US.bigqueryjob-2yq2ldf3wcoir", - "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/example-project/jobs/bigqueryjob-2yq2ldf3wcoir?location=US", - "user_email": "integration-test@example-project.iam.gserviceaccount.com", - "configuration": { - "query": { - "query": "SELECT state FROM [lookerdata:cdc.project_tycho_reports]", - "destinationTable": { + "creationTime": "1729233764153", + "lastModifiedTime": "1729233764153", + "location": "us-central1", + "type": "DEFAULT", + "maxTimeTravelHours": "168" + } + headers: + Content-Type: + - application/json; charset=UTF-8 + status: 200 OK + code: 200 + duration: 240.845627ms + - id: 9 + request: + proto: HTTP/1.1 + proto_major: 1 + proto_minor: 1 + content_length: 190 + transfer_encoding: [] + trailer: {} + host: bigquery.googleapis.com + remote_addr: "" + request_uri: "" + body: | + {"labels":{"cnrm-test":"true","managed-by-cnrm":"true"},"tableReference":{"datasetId":"bigquerydataset22yq2ldf3wcoir","projectId":"example-project","tableId":"bigquerytable2yq2ldf3wcoir"}} + form: {} + headers: + Content-Type: + - application/json + X-Goog-Api-Client: + - gl-go/1.23.0--20240626-RC01 cl/646990413 +5a18e79687 X:fieldtrack,boringcrypto gdcl/0.196.0 + url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset22yq2ldf3wcoir/tables?alt=json&prettyPrint=false + method: POST + response: + proto: HTTP/2.0 + proto_major: 2 + proto_minor: 0 + transfer_encoding: [] + trailer: {} + content_length: -1 + uncompressed: true + body: '{"kind":"bigquery#table","etag":"zP4N7TpLpSchHSlPBuMVew==","id":"example-project:bigquerydataset22yq2ldf3wcoir.bigquerytable2yq2ldf3wcoir","selfLink":"https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset22yq2ldf3wcoir/tables/bigquerytable2yq2ldf3wcoir","tableReference":{"projectId":"example-project","datasetId":"bigquerydataset22yq2ldf3wcoir","tableId":"bigquerytable2yq2ldf3wcoir"},"labels":{"cnrm-test":"true","managed-by-cnrm":"true"},"schema":{},"numBytes":"0","numLongTermBytes":"0","numRows":"0","creationTime":"1729233765250","lastModifiedTime":"1729233765330","type":"TABLE","location":"us-central1","numTotalLogicalBytes":"0","numActiveLogicalBytes":"0","numLongTermLogicalBytes":"0"}' + headers: + Content-Type: + - application/json; charset=UTF-8 + status: 200 OK + code: 200 + duration: 175.494323ms + - id: 10 + request: + proto: HTTP/1.1 + proto_major: 1 + proto_minor: 1 + content_length: 0 + transfer_encoding: [] + trailer: {} + host: bigquery.googleapis.com + remote_addr: "" + request_uri: "" + body: "" + form: {} + headers: + X-Goog-Api-Client: + - gl-go/1.23.0--20240626-RC01 cl/646990413 +5a18e79687 X:fieldtrack,boringcrypto gdcl/0.196.0 + url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset22yq2ldf3wcoir/tables/bigquerytable2yq2ldf3wcoir?alt=json&prettyPrint=false + method: GET + response: + proto: HTTP/2.0 + proto_major: 2 + proto_minor: 0 + transfer_encoding: [] + trailer: {} + content_length: -1 + uncompressed: true + body: 
'{"kind":"bigquery#table","etag":"zP4N7TpLpSchHSlPBuMVew==","id":"example-project:bigquerydataset22yq2ldf3wcoir.bigquerytable2yq2ldf3wcoir","selfLink":"https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset22yq2ldf3wcoir/tables/bigquerytable2yq2ldf3wcoir","tableReference":{"projectId":"example-project","datasetId":"bigquerydataset22yq2ldf3wcoir","tableId":"bigquerytable2yq2ldf3wcoir"},"labels":{"cnrm-test":"true","managed-by-cnrm":"true"},"numBytes":"0","numLongTermBytes":"0","numRows":"0","creationTime":"1729233765250","lastModifiedTime":"1729233765330","type":"TABLE","location":"us-central1","numTotalLogicalBytes":"0","numActiveLogicalBytes":"0","numLongTermLogicalBytes":"0"}' + headers: + Content-Type: + - application/json; charset=UTF-8 + status: 200 OK + code: 200 + duration: 237.337328ms + - id: 11 + request: + proto: HTTP/1.1 + proto_major: 1 + proto_minor: 1 + content_length: 0 + transfer_encoding: [] + trailer: {} + host: bigquery.googleapis.com + remote_addr: "" + request_uri: "" + body: "" + form: {} + headers: + X-Goog-Api-Client: + - gl-go/1.23.0--20240626-RC01 cl/646990413 +5a18e79687 X:fieldtrack,boringcrypto gdcl/0.196.0 + url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset22yq2ldf3wcoir/tables/bigquerytable2yq2ldf3wcoir?alt=json&prettyPrint=false + method: GET + response: + proto: HTTP/2.0 + proto_major: 2 + proto_minor: 0 + transfer_encoding: [] + trailer: {} + content_length: -1 + uncompressed: true + body: '{"kind":"bigquery#table","etag":"zP4N7TpLpSchHSlPBuMVew==","id":"example-project:bigquerydataset22yq2ldf3wcoir.bigquerytable2yq2ldf3wcoir","selfLink":"https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset22yq2ldf3wcoir/tables/bigquerytable2yq2ldf3wcoir","tableReference":{"projectId":"example-project","datasetId":"bigquerydataset22yq2ldf3wcoir","tableId":"bigquerytable2yq2ldf3wcoir"},"labels":{"cnrm-test":"true","managed-by-cnrm":"true"},"numBytes":"0","numLongTermBytes":"0","numRows":"0","creationTime":"1729233765250","lastModifiedTime":"1729233765330","type":"TABLE","location":"us-central1","numTotalLogicalBytes":"0","numActiveLogicalBytes":"0","numLongTermLogicalBytes":"0"}' + headers: + Content-Type: + - application/json; charset=UTF-8 + status: 200 OK + code: 200 + duration: 133.567967ms + - id: 12 + request: + proto: HTTP/1.1 + proto_major: 1 + proto_minor: 1 + content_length: 0 + transfer_encoding: [] + trailer: {} + host: bigquery.googleapis.com + remote_addr: "" + request_uri: "" + body: "" + form: {} + headers: + Content-Type: + - application/json + url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/jobs/bigqueryjob-2yq2ldf3wcoir?alt=json&location=us-central1 + method: GET + response: + proto: HTTP/2.0 + proto_major: 2 + proto_minor: 0 + transfer_encoding: [] + trailer: {} + content_length: -1 + uncompressed: true + body: | + { + "kind": "bigquery#job", + "etag": "qywbpQBNxG6Twvxjt/luNw==", + "id": "example-project:us-central1.bigqueryjob-2yq2ldf3wcoir", + "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/example-project/jobs/bigqueryjob-2yq2ldf3wcoir?location=us-central1", + "user_email": "xiaoweim@google.com", + "configuration": { + "query": { + "query": "SELECT state FROM [lookerdata:cdc.project_tycho_reports]", + "destinationTable": { + "projectId": "example-project", + "datasetId": "bigquerydataset22yq2ldf3wcoir", + "tableId": "bigquerytable2yq2ldf3wcoir" + }, + "createDisposition": 
"CREATE_NEVER", + "writeDisposition": "WRITE_APPEND", + "defaultDataset": { + "datasetId": "bigquerydataset12yq2ldf3wcoir", + "projectId": "example-project" + }, + "priority": "INTERACTIVE", + "allowLargeResults": true, + "useQueryCache": true, + "flattenResults": true, + "useLegacySql": true, + "schemaUpdateOptions": [ + "ALLOW_FIELD_ADDITION", + "ALLOW_FIELD_RELAXATION" + ], + "scriptOptions": { + "statementTimeoutMs": "300000", + "keyResultStatement": "LAST" + } + }, + "jobTimeoutMs": "600000", + "labels": { + "cnrm-test": "true", + "label-one": "value-one", + "managed-by-cnrm": "true" + }, + "jobType": "QUERY" + }, + "jobReference": { "projectId": "example-project", - "datasetId": "bigquerydataset22yq2ldf3wcoir", - "tableId": "bigquerytable2yq2ldf3wcoir" + "jobId": "bigqueryjob-2yq2ldf3wcoir", + "location": "us-central1" + }, + "statistics": { + "creationTime": "1729044292505", + "startTime": "1729044292825", + "endTime": "1729044292825" + }, + "status": { + "errorResult": { + "reason": "accessDenied", + "debugInfo": "[ACCESS_DENIED] message=com.google.net.rpc3.client.RpcClientException: APPLICATION_ERROR;cloud.helix/Authorization.AuthorizeQuery;domain: \"cloud.helix.ErrorDomain\" code: \"ACCESS_DENIED\" argument: \"Table\" argument: \"lookerdata:cdc.project_tycho_reports\" argument: \"User does not have permission to query table lookerdata:cdc.project_tycho_reports, or perhaps it does not exist.\" debug_info: \"[ACCESS_DENIED] errorProto=code: \\\"ACCESS_DENIED\\\"\\nargument: \\\"Table\\\"\\nargument: \\\"lookerdata:cdc.project_tycho_reports\\\"\\nargument: \\\"User does not have permission to query table lookerdata:cdc.project_tycho_reports, or perhaps it does not exist.\\\"\\n\\n\\tat com.google.cloud.helix.common.Exceptions$Public.accessDenied(Exceptions.java:574)\\n\\tat com.google.cloud.helix.common.Exceptions$Public.accessDenied(Exceptions.java:615)\\n\\tat com.google.cloud.helix.common.Exceptions$Public.accessDenied(Exceptions.java:619)\\n\\tat com.google.cloud.helix.common.UserTableReference.buildAccessDeniedException(UserTableReference.java:79)\\n\\tat com.google.cloud.helix.server.auth.QueryEntityReferenceConverter.authorizeMissingDataset(QueryEntityReferenceConverter.java:257)\\n\\tat com.google.cloud.helix.server.auth.QueryEntityReferenceConverter.normalizeIfNeeded(QueryEntityReferenceConverter.java:206)\\n\\tat com.google.cloud.helix.server.auth.QueryEntityReferenceConverter.convertTable(QueryEntityReferenceConverter.java:117)\\n\\tat com.google.cloud.helix.server.auth.QueryEntityReferenceConverter.convert(QueryEntityReferenceConverter.java:93)\\n\\tat com.google.cloud.helix.server.auth.service.AuthorizationService.lambda$authorizeQuery$1(AuthorizationService.java:444)\\n\\tat com.google.cloud.helix.server.common.TreeNode.transform(TreeNode.java:94)\\n\\tat com.google.cloud.helix.server.common.TreeNode.transform(TreeNode.java:96)\\n\\tat com.google.cloud.helix.server.auth.service.AuthorizationService.lambda$authorizeQuery$0(AuthorizationService.java:444)\\n\\tat com.google.cloud.helix.server.auth.service.AuthorizationService.runWithSecurityContext(AuthorizationService.java:1082)\\n\\tat com.google.cloud.helix.server.auth.service.AuthorizationService.authorizeQuery(AuthorizationService.java:395)\\n\\tat com.google.cloud.helix.proto2.Authorization$ServiceParameters$2.handleRequest(Authorization.java:511)\\n\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$RpcApplicationHandlerAdaptor.handleRequest(RpcServerInterceptor2Util.java:82)\\n\\tat 
com.google.net.rpc3.impl.server.AggregatedRpcServerInterceptors.interceptRpc(AggregatedRpcServerInterceptors.java:100)\\n\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\n\\tat com.google.prod.fireaxe.filters.FireaxeRpcServerInterceptorImpl.interceptRpc(FireaxeRpcServerInterceptorImpl.java:68)\\n\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\n\\tat com.google.cloud.helix.common.isolation.IsolationServerInterceptor.checkIsolationThrottlingAndContinueHandling(IsolationServerInterceptor.java:162)\\n\\tat com.google.cloud.helix.common.isolation.IsolationServerInterceptor.interceptRpc(IsolationServerInterceptor.java:113)\\n\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\n\\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplicationWithCancellation(RpcServerInternalContext.java:693)\\n\\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.lambda$runRpcInApplication$0(RpcServerInternalContext.java:658)\\n\\tat io.grpc.Context.run(Context.java:536)\\n\\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplication(RpcServerInternalContext.java:658)\\n\\tat com.google.net.rpc3.impl.server.RpcServerChannel$4.apply(RpcServerChannel.java:1102)\\n\\tat com.google.net.rpc3.impl.server.RpcServerChannel$4.apply(RpcServerChannel.java:1060)\\n\\tat com.google.common.util.concurrent.AbstractTransformFuture$TransformFuture.doTransform(AbstractTransformFuture.java:259)\\n\\tat com.google.common.util.concurrent.AbstractTransformFuture$TransformFuture.doTransform(AbstractTransformFuture.java:249)\\n\\tat com.google.common.util.concurrent.AbstractTransformFuture.run(AbstractTransformFuture.java:130)\\n\\tat com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\\n\\tat com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:80)\\n\\tat com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\\n\\tat com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\\n\\tat io.grpc.Context.run(Context.java:536)\\n\\tat com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:78)\\n\\tat com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\\n\\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)\\n\\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)\\n\\tat java.base/java.lang.Thread.run(Unknown Source)\\n\\tSuppressed: [NOT_FOUND] message=com.google.net.rpc3.client.RpcClientException: APPLICATION_ERROR;cloud.helix/Storage.GetDataset;domain: \\\"cloud.helix.ErrorDomain\\\" code: \\\"NOT_FOUND\\\" argument: \\\"Dataset\\\" argument: \\\"lookerdata:cdc\\\" debug_info: \\\"[NOT_FOUND] message=Dataset lookerdata:cdc not found debug=time: 2024-10-15T19:04:52.652-07:00 errorProto=code: \\\\\\\"NOT_FOUND\\\\\\\"\\\\nargument: \\\\\\\"Dataset\\\\\\\"\\\\nargument: \\\\\\\"lookerdata:cdc\\\\\\\"\\\\n\\\\n\\\\tat com.google.cloud.helix.common.Exceptions$Public.resourceNotFound(Exceptions.java:292)\\\\n\\\\tat com.google.cloud.helix.common.Exceptions$Public.resourceNotFound(Exceptions.java:296)\\\\n\\\\tat 
com.google.cloud.helix.server.metadata.DatasetTrackerSpanner.lambda$getDatasetEntityAsync$0(DatasetTrackerSpanner.java:705)\\\\n\\\\tat com.google.common.util.concurrent.AbstractTransformFuture$AsyncTransformFuture.doTransform(AbstractTransformFuture.java:230)\\\\n\\\\tat com.google.common.util.concurrent.AbstractTransformFuture$AsyncTransformFuture.doTransform(AbstractTransformFuture.java:217)\\\\n\\\\tat com.google.common.util.concurrent.AbstractTransformFuture.run(AbstractTransformFuture.java:130)\\\\n\\\\tat com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\\\\n\\\\tat io.grpc.Context.run(Context.java:536)\\\\n\\\\tat com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:78)\\\\n\\\\tat com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\\\\n\\\\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)\\\\n\\\\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)\\\\n\\\\tat com.google.cloud.helix.common.ThreadPoolFactory.lambda$defaultThreadFactory$2(ThreadPoolFactory.java:143)\\\\n\\\\tat java.base/java.lang.Thread.run(Unknown Source)\\\\n\\\\tSuppressed: java.lang.Exception: Including call stack from HelixFutures\\\\n\\\\t\\\\tat com.google.cloud.helix.common.HelixFutures.getHelixException(HelixFutures.java:76)\\\\n\\\\t\\\\tat com.google.cloud.helix.common.HelixFutures.get(HelixFutures.java:42)\\\\n\\\\t\\\\tat com.google.cloud.helix.server.metadata.DatasetTracker.lookupDataset(DatasetTracker.java:152)\\\\n\\\\t\\\\tat com.google.cloud.helix.server.rosy.GetDatasetImpl.getDatasetInternal(GetDatasetImpl.java:60)\\\\n\\\\t\\\\tat com.google.cloud.helix.server.rosy.GetDatasetImpl.getDataset(GetDatasetImpl.java:38)\\\\n\\\\t\\\\tat com.google.cloud.helix.server.rosy.StorageStubby.getDataset(StorageStubby.java:504)\\\\n\\\\t\\\\tat com.google.cloud.helix.proto2.Storage$ServiceParameters$81.handleBlockingRequest(Storage.java:4539)\\\\n\\\\t\\\\tat com.google.cloud.helix.proto2.Storage$ServiceParameters$81.handleBlockingRequest(Storage.java:4537)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcBlockingApplicationHandler.handleRequest(RpcBlockingApplicationHandler.java:28)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$RpcApplicationHandlerAdaptor.handleRequest(RpcServerInterceptor2Util.java:82)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.AggregatedRpcServerInterceptors.interceptRpc(AggregatedRpcServerInterceptors.java:100)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\\\n\\\\t\\\\tat com.google.prod.fireaxe.filters.FireaxeRpcServerInterceptorImpl.interceptRpc(FireaxeRpcServerInterceptorImpl.java:68)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\\\n\\\\t\\\\tat com.google.cloud.helix.common.isolation.IsolationServerInterceptor.checkIsolationThrottlingAndContinueHandling(IsolationServerInterceptor.java:162)\\\\n\\\\t\\\\tat com.google.cloud.helix.common.isolation.IsolationServerInterceptor.interceptRpc(IsolationServerInterceptor.java:113)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\\\n\\\\t\\\\tat 
com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplicationWithCancellation(RpcServerInternalContext.java:693)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.lambda$runRpcInApplication$0(RpcServerInternalContext.java:658)\\\\n\\\\t\\\\tat io.grpc.Context.run(Context.java:536)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplication(RpcServerInternalContext.java:658)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerChannel$4.apply(RpcServerChannel.java:1102)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerChannel$4.apply(RpcServerChannel.java:1060)\\\\n\\\\t\\\\tat com.google.common.util.concurrent.AbstractTransformFuture$TransformFuture.doTransform(AbstractTransformFuture.java:259)\\\\n\\\\t\\\\tat com.google.common.util.concurrent.AbstractTransformFuture$TransformFuture.doTransform(AbstractTransformFuture.java:249)\\\\n\\\\t\\\\tat com.google.common.util.concurrent.AbstractTransformFuture.run(AbstractTransformFuture.java:130)\\\\n\\\\t\\\\tat com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\\\\n\\\\t\\\\tat com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:80)\\\\n\\\\t\\\\tat com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\\\\n\\\\t\\\\tat com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\\\\n\\\\t\\\\tat io.grpc.Context.run(Context.java:536)\\\\n\\\\t\\\\tat com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:78)\\\\n\\\\t\\\\tat com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\\\\n\\\\t\\\\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)\\\\n\\\\t\\\\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)\\\\n\\\\t\\\\t... 1 more\\\\n\\\";AppErrorCode=2;StartTimeMs=1729044292646;unknown;ResFormat=uncompressed;ServerTimeSec=0.006606848;LogBytes=256;Non-FailFast;EffSecLevel=none;ReqFormat=uncompressed;ReqID=e282aacfbcda1ddc;GlobalID=0;Server=[2002:a05:6845:9603:b0:3d:63a8:3b9e]:4001 debug=time: 2024-10-15T19:04:52.652-07:00 errorProto=code: \\\"NOT_FOUND\\\"\\nargument: \\\"Dataset\\\"\\nargument: \\\"lookerdata:cdc\\\"\\n\\n\\t\\tat com.google.cloud.helix.server.common.StubbyUtil.createHelixException(StubbyUtil.java:229)\\n\\t\\tat com.google.cloud.helix.server.metadata.lookup.RpcDatasetMetadataLookup.resolveDataset(RpcDatasetMetadataLookup.java:82)\\n\\t\\tat com.google.cloud.helix.server.auth.QueryEntityReferenceConverter.normalizeIfNeeded(QueryEntityReferenceConverter.java:201)\\n\\t\\t... 
36 more\\n\\tCaused by: com.google.net.rpc3.client.RpcClientException: APPLICATION_ERROR;cloud.helix/Storage.GetDataset;domain: \\\"cloud.helix.ErrorDomain\\\" code: \\\"NOT_FOUND\\\" argument: \\\"Dataset\\\" argument: \\\"lookerdata:cdc\\\" debug_info: \\\"[NOT_FOUND] message=Dataset lookerdata:cdc not found debug=time: 2024-10-15T19:04:52.652-07:00 errorProto=code: \\\\\\\"NOT_FOUND\\\\\\\"\\\\nargument: \\\\\\\"Dataset\\\\\\\"\\\\nargument: \\\\\\\"lookerdata:cdc\\\\\\\"\\\\n\\\\n\\\\tat com.google.cloud.helix.common.Exceptions$Public.resourceNotFound(Exceptions.java:292)\\\\n\\\\tat com.google.cloud.helix.common.Exceptions$Public.resourceNotFound(Exceptions.java:296)\\\\n\\\\tat com.google.cloud.helix.server.metadata.DatasetTrackerSpanner.lambda$getDatasetEntityAsync$0(DatasetTrackerSpanner.java:705)\\\\n\\\\tat com.google.common.util.concurrent.AbstractTransformFuture$AsyncTransformFuture.doTransform(AbstractTransformFuture.java:230)\\\\n\\\\tat com.google.common.util.concurrent.AbstractTransformFuture$AsyncTransformFuture.doTransform(AbstractTransformFuture.java:217)\\\\n\\\\tat com.google.common.util.concurrent.AbstractTransformFuture.run(AbstractTransformFuture.java:130)\\\\n\\\\tat com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\\\\n\\\\tat io.grpc.Context.run(Context.java:536)\\\\n\\\\tat com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:78)\\\\n\\\\tat com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\\\\n\\\\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)\\\\n\\\\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)\\\\n\\\\tat com.google.cloud.helix.common.ThreadPoolFactory.lambda$defaultThreadFactory$2(ThreadPoolFactory.java:143)\\\\n\\\\tat java.base/java.lang.Thread.run(Unknown Source)\\\\n\\\\tSuppressed: java.lang.Exception: Including call stack from HelixFutures\\\\n\\\\t\\\\tat com.google.cloud.helix.common.HelixFutures.getHelixException(HelixFutures.java:76)\\\\n\\\\t\\\\tat com.google.cloud.helix.common.HelixFutures.get(HelixFutures.java:42)\\\\n\\\\t\\\\tat com.google.cloud.helix.server.metadata.DatasetTracker.lookupDataset(DatasetTracker.java:152)\\\\n\\\\t\\\\tat com.google.cloud.helix.server.rosy.GetDatasetImpl.getDatasetInternal(GetDatasetImpl.java:60)\\\\n\\\\t\\\\tat com.google.cloud.helix.server.rosy.GetDatasetImpl.getDataset(GetDatasetImpl.java:38)\\\\n\\\\t\\\\tat com.google.cloud.helix.server.rosy.StorageStubby.getDataset(StorageStubby.java:504)\\\\n\\\\t\\\\tat com.google.cloud.helix.proto2.Storage$ServiceParameters$81.handleBlockingRequest(Storage.java:4539)\\\\n\\\\t\\\\tat com.google.cloud.helix.proto2.Storage$ServiceParameters$81.handleBlockingRequest(Storage.java:4537)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcBlockingApplicationHandler.handleRequest(RpcBlockingApplicationHandler.java:28)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$RpcApplicationHandlerAdaptor.handleRequest(RpcServerInterceptor2Util.java:82)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.AggregatedRpcServerInterceptors.interceptRpc(AggregatedRpcServerInterceptors.java:100)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\\\n\\\\t\\\\tat com.google.prod.fireaxe.filters.FireaxeRpcServerInterceptorImpl.interceptRpc(FireaxeRpcServerInterceptorImpl.java:68)\\\\n\\\\t\\\\tat 
com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\\\n\\\\t\\\\tat com.google.cloud.helix.common.isolation.IsolationServerInterceptor.checkIsolationThrottlingAndContinueHandling(IsolationServerInterceptor.java:162)\\\\n\\\\t\\\\tat com.google.cloud.helix.common.isolation.IsolationServerInterceptor.interceptRpc(IsolationServerInterceptor.java:113)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplicationWithCancellation(RpcServerInternalContext.java:693)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.lambda$runRpcInApplication$0(RpcServerInternalContext.java:658)\\\\n\\\\t\\\\tat io.grpc.Context.run(Context.java:536)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplication(RpcServerInternalContext.java:658)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerChannel$4.apply(RpcServerChannel.java:1102)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerChannel$4.apply(RpcServerChannel.java:1060)\\\\n\\\\t\\\\tat com.google.common.util.concurrent.AbstractTransformFuture$TransformFuture.doTransform(AbstractTransformFuture.java:259)\\\\n\\\\t\\\\tat com.google.common.util.concurrent.AbstractTransformFuture$TransformFuture.doTransform(AbstractTransformFuture.java:249)\\\\n\\\\t\\\\tat com.google.common.util.concurrent.AbstractTransformFuture.run(AbstractTransformFuture.java:130)\\\\n\\\\t\\\\tat com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\\\\n\\\\t\\\\tat com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:80)\\\\n\\\\t\\\\tat com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\\\\n\\\\t\\\\tat com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\\\\n\\\\t\\\\tat io.grpc.Context.run(Context.java:536)\\\\n\\\\t\\\\tat com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:78)\\\\n\\\\t\\\\tat com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\\\\n\\\\t\\\\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)\\\\n\\\\t\\\\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)\\\\n\\\\t\\\\t... 1 more\\\\n\\\";AppErrorCode=2;StartTimeMs=1729044292646;unknown;ResFormat=uncompressed;ServerTimeSec=0.006606848;LogBytes=256;Non-FailFast;EffSecLevel=none;ReqFormat=uncompressed;ReqID=e282aacfbcda1ddc;GlobalID=0;Server=[2002:a05:6845:9603:b0:3d:63a8:3b9e]:4001\\n\\t\\tat com.google.net.rpc3.client.RpcStub.startBlockingRpcInternal(RpcStub.java:571)\\n\\t\\tat com.google.net.rpc3.client.RpcStub.startBlockingRpc(RpcStub.java:471)\\n\\t\\tat com.google.cloud.helix.proto2.Storage$Stub.getDataset(Storage.java:1349)\\n\\t\\tat com.google.cloud.helix.server.metadata.lookup.RpcDatasetMetadataLookup.resolveDataset(RpcDatasetMetadataLookup.java:55)\\n\\t\\t... 
37 more\\n\\tSuppressed: [ACCESS_DENIED] debug=User 768386550392: IAM access denied errorProto=code: \\\"ACCESS_DENIED\\\"\\nargument: \\\"Dataset\\\"\\nargument: \\\"lookerdata:cdc\\\"\\nargument: \\\"Permission bigquery.datasets.get denied on dataset lookerdata:cdc (or it may not exist).\\\"\\n\\n\\t\\tat com.google.cloud.helix.common.Exceptions$Public.accessDenied(Exceptions.java:574)\\n\\t\\tat com.google.cloud.helix.common.auth.AuthorizerExceptions$Public.createPermissionDeny(AuthorizerExceptions.java:262)\\n\\t\\tat com.google.cloud.helix.common.auth.AuthorizerExceptions$Public.datasetAccessDenied(AuthorizerExceptions.java:156)\\n\\t\\tat com.google.cloud.helix.server.auth.IamAuthorizer.lambda$authorizeDatasetInternal$4(IamAuthorizer.java:1194)\\n\\t\\tat com.google.common.util.concurrent.AbstractTransformFuture$TransformFuture.doTransform(AbstractTransformFuture.java:259)\\n\\t\\tat com.google.common.util.concurrent.AbstractTransformFuture$TransformFuture.doTransform(AbstractTransformFuture.java:249)\\n\\t\\tat com.google.common.util.concurrent.AbstractTransformFuture.run(AbstractTransformFuture.java:130)\\n\\t\\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Unknown Source)\\n\\t\\tat com.google.common.util.concurrent.TrustedListenableFutureTask$TrustedFutureInterruptibleTask.runInterruptibly(TrustedListenableFutureTask.java:135)\\n\\t\\tat com.google.common.util.concurrent.InterruptibleTask.run(InterruptibleTask.java:77)\\n\\t\\tat com.google.common.util.concurrent.TrustedListenableFutureTask.run(TrustedListenableFutureTask.java:86)\\n\\t\\t... 7 more\\n\";AppErrorCode=4;StartTimeMs=1729044292643;unknown;Deadline(sec)=60.0;ResFormat=uncompressed;ServerTimeSec=0.174171421;LogBytes=256;FailFast;EffSecLevel=privacy_and_integrity;ReqFormat=uncompressed;ReqID=1d5a3c9d34b06964;GlobalID=0;Server=[2002:a05:6845:6018:b0:3d:366e:f847]:4001 errorProto=code: \"ACCESS_DENIED\"\nargument: \"Table\"\nargument: \"lookerdata:cdc.project_tycho_reports\"\nargument: \"User does not have permission to query table lookerdata:cdc.project_tycho_reports, or perhaps it does not exist.\"\n\n\tat com.google.cloud.helix.server.common.StubbyUtil.createHelixException(StubbyUtil.java:229)\n\tat com.google.cloud.helix.common.auth.client.AuthorizationServiceClientImpl.authorizeQuery(AuthorizationServiceClientImpl.java:350)\n\tat com.google.cloud.helix.server.auth.service.authorizers.QueryAuthorizer.callAuthServer(QueryAuthorizer.java:415)\n\tat com.google.cloud.helix.server.auth.service.authorizers.QueryAuthorizer.lambda$authorizeQuery$2(QueryAuthorizer.java:183)\n\tat com.google.common.util.concurrent.TrustedListenableFutureTask$TrustedFutureInterruptibleTask.runInterruptibly(TrustedListenableFutureTask.java:135)\n\tat com.google.common.util.concurrent.InterruptibleTask.run(InterruptibleTask.java:77)\n\tat com.google.common.util.concurrent.TrustedListenableFutureTask.run(TrustedListenableFutureTask.java:86)\n\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Unknown Source)\n\tat com.google.common.util.concurrent.TrustedListenableFutureTask$TrustedFutureInterruptibleTask.runInterruptibly(TrustedListenableFutureTask.java:135)\n\tat com.google.common.util.concurrent.InterruptibleTask.run(InterruptibleTask.java:77)\n\tat com.google.common.util.concurrent.TrustedListenableFutureTask.run(TrustedListenableFutureTask.java:86)\n\tat com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\n\tat io.grpc.Context.run(Context.java:536)\n\tat 
com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:78)\n\tat com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\n\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)\n\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)\n\tat java.base/java.lang.Thread.run(Unknown Source)\n\tSuppressed: java.lang.Exception: Including call stack from HelixFutures\n\t\tat com.google.cloud.helix.common.HelixFutures.getHelixException(HelixFutures.java:76)\n\t\tat com.google.cloud.helix.common.HelixFutures.get(HelixFutures.java:42)\n\t\tat com.google.cloud.helix.server.auth.service.authorizers.QueryAuthorizer.authorizeQuery(QueryAuthorizer.java:194)\n\t\tat com.google.cloud.helix.server.job.ReferenceCollector.authorizeEntitiesRead(ReferenceCollector.java:1640)\n\t\tat com.google.cloud.helix.server.job.ReferenceCollector.authorizeEntitiesRead(ReferenceCollector.java:1626)\n\t\tat com.google.cloud.helix.server.job.ReferenceCollector.getReferenceTree(ReferenceCollector.java:575)\n\t\tat com.google.cloud.helix.server.job.CatalogMetadataResolver.resolve(CatalogMetadataResolver.java:150)\n\t\tat com.google.cloud.helix.server.job.QueryAnalyzer.addTables(QueryAnalyzer.java:948)\n\t\tat com.google.cloud.helix.server.job.QueryAnalyzer.parseReferencedTables(QueryAnalyzer.java:4298)\n\t\tat com.google.cloud.helix.server.job.QueryAnalyzer.buildQueryInfo(QueryAnalyzer.java:3603)\n\t\tat com.google.cloud.helix.server.job.LocalQueryJobController.loadQueryInfoInternal(LocalQueryJobController.java:4014)\n\t\tat com.google.cloud.helix.server.job.LocalQueryJobController.loadQueryInfo(LocalQueryJobController.java:4089)\n\t\tat com.google.cloud.helix.server.job.LocalQueryJobController.checkInternal(LocalQueryJobController.java:4704)\n\t\tat com.google.cloud.helix.server.job.LocalQueryJobController.checkAsync(LocalQueryJobController.java:4620)\n\t\tat com.google.cloud.helix.server.job.LocalSqlJobController.checkAsync(LocalSqlJobController.java:129)\n\t\tat com.google.cloud.helix.server.job.LocalJobController.check(LocalJobController.java:1503)\n\t\tat com.google.cloud.helix.server.job.JobControllerModule$1.check(JobControllerModule.java:831)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine$1.check(JobStateMachine.java:3794)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.checkInternal(JobStateMachine.java:3063)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.lambda$runJob$8(JobStateMachine.java:2713)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.runState(JobStateMachine.java:2941)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.runJob(JobStateMachine.java:2713)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.execute(JobStateMachine.java:2602)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insertNormalizedJob(HelixJobRosy.java:1728)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insertJobInternalWithoutQuota(HelixJobRosy.java:2380)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insertJobInternal(HelixJobRosy.java:2270)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insertInternal(HelixJobRosy.java:2409)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insertRequestInternal(HelixJobRosy.java:3909)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insert(HelixJobRosy.java:3880)\n\t\tat java.base/jdk.internal.reflect.DirectMethodHandleAccessor.invoke(Unknown Source)\n\t\tat 
java.base/java.lang.reflect.Method.invoke(Unknown Source)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$innerContinuation$3(RpcRequestProxy.java:435)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestDapperHookFactory$TracingRequestHook.call(RosyRequestDapperHookFactory.java:88)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestCredsHookFactory$1.call(RosyRequestCredsHookFactory.java:56)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestConcurrentCallsHookFactory$Hook.call(RosyRequestConcurrentCallsHookFactory.java:101)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestVarzHookFactory$Hook.call(RosyRequestVarzHookFactory.java:464)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestAuditHookFactory$1.call(RosyRequestAuditHookFactory.java:107)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RequestSecurityExtensionForGwsHookFactory$1.call(RequestSecurityExtensionForGwsHookFactory.java:69)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestSecurityContextHookFactory$1.call(RosyRequestSecurityContextHookFactory.java:83)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestContextHookFactory.call(RosyRequestContextHookFactory.java:63)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.oneplatform.common.OnePlatformRosyHookModule$MendelIdLogger$1.call(OnePlatformRosyHookModule.java:123)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.invoke(RpcRequestProxy.java:666)\n\t\tat jdk.proxy2/jdk.proxy2.$Proxy52.insert(Unknown Source)\n\t\tat com.google.cloud.helix.proto.proto2api.HelixJobService$ServiceParameters$1.handleRequest(HelixJobService.java:917)\n\t\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$RpcApplicationHandlerAdaptor.handleRequest(RpcServerInterceptor2Util.java:82)\n\t\tat com.google.net.rpc3.impl.server.AggregatedRpcServerInterceptors.interceptRpc(AggregatedRpcServerInterceptors.java:100)\n\t\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\n\t\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplicationWithCancellation(RpcServerInternalContext.java:693)\n\t\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.lambda$runRpcInApplication$0(RpcServerInternalContext.java:658)\n\t\tat io.grpc.Context.run(Context.java:536)\n\t\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplication(RpcServerInternalContext.java:658)\n\t\tat 
com.google.net.rpc3.util.RpcInProcessConnector$ServerInternalContext.lambda$runWithExecutor$1(RpcInProcessConnector.java:1995)\n\t\t... 7 more\n\tSuppressed: [NOT_FOUND] message=Dataset lookerdata:cdc not found debug=time: 2024-10-15T19:04:52.643-07:00 errorProto=code: \"NOT_FOUND\"\nargument: \"Dataset\"\nargument: \"lookerdata:cdc\"\n\n\t\tat com.google.cloud.helix.server.job.CrossRegionDatasetResolver.resolve(CrossRegionDatasetResolver.java:162)\n\t\tat com.google.cloud.helix.server.job.ReferenceCollector.addTablesAndRoutinesToCache(ReferenceCollector.java:1380)\n\t\tat com.google.cloud.helix.server.job.ReferenceCollector.getReferenceTree(ReferenceCollector.java:560)\n\t\tat com.google.cloud.helix.server.job.CatalogMetadataResolver.resolve(CatalogMetadataResolver.java:150)\n\t\tat com.google.cloud.helix.server.job.QueryAnalyzer.addTables(QueryAnalyzer.java:948)\n\t\tat com.google.cloud.helix.server.job.QueryAnalyzer.parseReferencedTables(QueryAnalyzer.java:4298)\n\t\tat com.google.cloud.helix.server.job.QueryAnalyzer.buildQueryInfo(QueryAnalyzer.java:3603)\n\t\tat com.google.cloud.helix.server.job.LocalQueryJobController.loadQueryInfoInternal(LocalQueryJobController.java:4014)\n\t\tat com.google.cloud.helix.server.job.LocalQueryJobController.loadQueryInfo(LocalQueryJobController.java:4089)\n\t\tat com.google.cloud.helix.server.job.LocalQueryJobController.checkInternal(LocalQueryJobController.java:4704)\n\t\tat com.google.cloud.helix.server.job.LocalQueryJobController.checkAsync(LocalQueryJobController.java:4620)\n\t\tat com.google.cloud.helix.server.job.LocalSqlJobController.checkAsync(LocalSqlJobController.java:129)\n\t\tat com.google.cloud.helix.server.job.LocalJobController.check(LocalJobController.java:1503)\n\t\tat com.google.cloud.helix.server.job.JobControllerModule$1.check(JobControllerModule.java:831)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine$1.check(JobStateMachine.java:3794)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.checkInternal(JobStateMachine.java:3063)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.lambda$runJob$8(JobStateMachine.java:2713)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.runState(JobStateMachine.java:2941)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.runJob(JobStateMachine.java:2713)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.execute(JobStateMachine.java:2602)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insertNormalizedJob(HelixJobRosy.java:1728)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insertJobInternalWithoutQuota(HelixJobRosy.java:2380)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insertJobInternal(HelixJobRosy.java:2270)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insertInternal(HelixJobRosy.java:2409)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insertRequestInternal(HelixJobRosy.java:3909)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insert(HelixJobRosy.java:3880)\n\t\tat java.base/jdk.internal.reflect.DirectMethodHandleAccessor.invoke(Unknown Source)\n\t\tat java.base/java.lang.reflect.Method.invoke(Unknown Source)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$innerContinuation$3(RpcRequestProxy.java:435)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestDapperHookFactory$TracingRequestHook.call(RosyRequestDapperHookFactory.java:88)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat 
com.google.cloud.helix.common.rosy.RosyRequestCredsHookFactory$1.call(RosyRequestCredsHookFactory.java:56)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestConcurrentCallsHookFactory$Hook.call(RosyRequestConcurrentCallsHookFactory.java:101)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestVarzHookFactory$Hook.call(RosyRequestVarzHookFactory.java:464)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestAuditHookFactory$1.call(RosyRequestAuditHookFactory.java:107)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RequestSecurityExtensionForGwsHookFactory$1.call(RequestSecurityExtensionForGwsHookFactory.java:69)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestSecurityContextHookFactory$1.call(RosyRequestSecurityContextHookFactory.java:83)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestContextHookFactory.call(RosyRequestContextHookFactory.java:63)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.oneplatform.common.OnePlatformRosyHookModule$MendelIdLogger$1.call(OnePlatformRosyHookModule.java:123)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.invoke(RpcRequestProxy.java:666)\n\t\tat jdk.proxy2/jdk.proxy2.$Proxy52.insert(Unknown Source)\n\t\tat com.google.cloud.helix.proto.proto2api.HelixJobService$ServiceParameters$1.handleRequest(HelixJobService.java:917)\n\t\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$RpcApplicationHandlerAdaptor.handleRequest(RpcServerInterceptor2Util.java:82)\n\t\tat com.google.net.rpc3.impl.server.AggregatedRpcServerInterceptors.interceptRpc(AggregatedRpcServerInterceptors.java:100)\n\t\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\n\t\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplicationWithCancellation(RpcServerInternalContext.java:693)\n\t\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.lambda$runRpcInApplication$0(RpcServerInternalContext.java:658)\n\t\tat io.grpc.Context.run(Context.java:536)\n\t\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplication(RpcServerInternalContext.java:658)\n\t\tat com.google.net.rpc3.util.RpcInProcessConnector$ServerInternalContext.lambda$runWithExecutor$1(RpcInProcessConnector.java:1995)\n\t\t... 
7 more\n\tSuppressed: java.lang.Exception: Including call stack from HelixFutures\n\t\tat com.google.cloud.helix.common.HelixFutures.getHelixException(HelixFutures.java:76)\n\t\tat com.google.cloud.helix.common.HelixFutures.getDone(HelixFutures.java:55)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.handleCheckDone(JobStateMachine.java:3088)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.handleCheckDoneInSpan(JobStateMachine.java:3077)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.lambda$checkInternal$16(JobStateMachine.java:3067)\n\t\tat com.google.common.util.concurrent.CombinedFuture$CallableInterruptibleTask.runInterruptibly(CombinedFuture.java:198)\n\t\tat com.google.common.util.concurrent.InterruptibleTask.run(InterruptibleTask.java:77)\n\t\tat com.google.common.util.concurrent.DirectExecutor.execute(DirectExecutor.java:32)\n\t\tat com.google.common.util.concurrent.CombinedFuture$CombinedFutureInterruptibleTask.execute(CombinedFuture.java:110)\n\t\tat com.google.common.util.concurrent.CombinedFuture.handleAllCompleted(CombinedFuture.java:67)\n\t\tat com.google.common.util.concurrent.AggregateFuture.processCompleted(AggregateFuture.java:317)\n\t\tat com.google.common.util.concurrent.AggregateFuture.decrementCountAndMaybeComplete(AggregateFuture.java:299)\n\t\tat com.google.common.util.concurrent.AggregateFuture.init(AggregateFuture.java:174)\n\t\tat com.google.common.util.concurrent.CombinedFuture.\u003cinit\u003e(CombinedFuture.java:57)\n\t\tat com.google.common.util.concurrent.Futures$FutureCombiner.call(Futures.java:883)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.checkInternal(JobStateMachine.java:3066)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.lambda$runJob$8(JobStateMachine.java:2713)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.runState(JobStateMachine.java:2941)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.runJob(JobStateMachine.java:2713)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.execute(JobStateMachine.java:2602)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insertNormalizedJob(HelixJobRosy.java:1728)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insertJobInternalWithoutQuota(HelixJobRosy.java:2380)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insertJobInternal(HelixJobRosy.java:2270)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insertInternal(HelixJobRosy.java:2409)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insertRequestInternal(HelixJobRosy.java:3909)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insert(HelixJobRosy.java:3880)\n\t\tat java.base/jdk.internal.reflect.DirectMethodHandleAccessor.invoke(Unknown Source)\n\t\tat java.base/java.lang.reflect.Method.invoke(Unknown Source)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$innerContinuation$3(RpcRequestProxy.java:435)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestDapperHookFactory$TracingRequestHook.call(RosyRequestDapperHookFactory.java:88)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestCredsHookFactory$1.call(RosyRequestCredsHookFactory.java:56)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat 
com.google.cloud.helix.common.rosy.RosyRequestConcurrentCallsHookFactory$Hook.call(RosyRequestConcurrentCallsHookFactory.java:101)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestVarzHookFactory$Hook.call(RosyRequestVarzHookFactory.java:464)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestAuditHookFactory$1.call(RosyRequestAuditHookFactory.java:107)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RequestSecurityExtensionForGwsHookFactory$1.call(RequestSecurityExtensionForGwsHookFactory.java:69)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestSecurityContextHookFactory$1.call(RosyRequestSecurityContextHookFactory.java:83)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestContextHookFactory.call(RosyRequestContextHookFactory.java:63)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.oneplatform.common.OnePlatformRosyHookModule$MendelIdLogger$1.call(OnePlatformRosyHookModule.java:123)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.invoke(RpcRequestProxy.java:666)\n\t\tat jdk.proxy2/jdk.proxy2.$Proxy52.insert(Unknown Source)\n\t\tat com.google.cloud.helix.proto.proto2api.HelixJobService$ServiceParameters$1.handleRequest(HelixJobService.java:917)\n\t\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$RpcApplicationHandlerAdaptor.handleRequest(RpcServerInterceptor2Util.java:82)\n\t\tat com.google.net.rpc3.impl.server.AggregatedRpcServerInterceptors.interceptRpc(AggregatedRpcServerInterceptors.java:100)\n\t\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\n\t\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplicationWithCancellation(RpcServerInternalContext.java:693)\n\t\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.lambda$runRpcInApplication$0(RpcServerInternalContext.java:658)\n\t\tat io.grpc.Context.run(Context.java:536)\n\t\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplication(RpcServerInternalContext.java:658)\n\t\tat com.google.net.rpc3.util.RpcInProcessConnector$ServerInternalContext.lambda$runWithExecutor$1(RpcInProcessConnector.java:1995)\n\t\t... 
7 more\nCaused by: com.google.net.rpc3.client.RpcClientException: APPLICATION_ERROR;cloud.helix/Authorization.AuthorizeQuery;domain: \"cloud.helix.ErrorDomain\" code: \"ACCESS_DENIED\" argument: \"Table\" argument: \"lookerdata:cdc.project_tycho_reports\" argument: \"User does not have permission to query table lookerdata:cdc.project_tycho_reports, or perhaps it does not exist.\" debug_info: \"[ACCESS_DENIED] errorProto=code: \\\"ACCESS_DENIED\\\"\\nargument: \\\"Table\\\"\\nargument: \\\"lookerdata:cdc.project_tycho_reports\\\"\\nargument: \\\"User does not have permission to query table lookerdata:cdc.project_tycho_reports, or perhaps it does not exist.\\\"\\n\\n\\tat com.google.cloud.helix.common.Exceptions$Public.accessDenied(Exceptions.java:574)\\n\\tat com.google.cloud.helix.common.Exceptions$Public.accessDenied(Exceptions.java:615)\\n\\tat com.google.cloud.helix.common.Exceptions$Public.accessDenied(Exceptions.java:619)\\n\\tat com.google.cloud.helix.common.UserTableReference.buildAccessDeniedException(UserTableReference.java:79)\\n\\tat com.google.cloud.helix.server.auth.QueryEntityReferenceConverter.authorizeMissingDataset(QueryEntityReferenceConverter.java:257)\\n\\tat com.google.cloud.helix.server.auth.QueryEntityReferenceConverter.normalizeIfNeeded(QueryEntityReferenceConverter.java:206)\\n\\tat com.google.cloud.helix.server.auth.QueryEntityReferenceConverter.convertTable(QueryEntityReferenceConverter.java:117)\\n\\tat com.google.cloud.helix.server.auth.QueryEntityReferenceConverter.convert(QueryEntityReferenceConverter.java:93)\\n\\tat com.google.cloud.helix.server.auth.service.AuthorizationService.lambda$authorizeQuery$1(AuthorizationService.java:444)\\n\\tat com.google.cloud.helix.server.common.TreeNode.transform(TreeNode.java:94)\\n\\tat com.google.cloud.helix.server.common.TreeNode.transform(TreeNode.java:96)\\n\\tat com.google.cloud.helix.server.auth.service.AuthorizationService.lambda$authorizeQuery$0(AuthorizationService.java:444)\\n\\tat com.google.cloud.helix.server.auth.service.AuthorizationService.runWithSecurityContext(AuthorizationService.java:1082)\\n\\tat com.google.cloud.helix.server.auth.service.AuthorizationService.authorizeQuery(AuthorizationService.java:395)\\n\\tat com.google.cloud.helix.proto2.Authorization$ServiceParameters$2.handleRequest(Authorization.java:511)\\n\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$RpcApplicationHandlerAdaptor.handleRequest(RpcServerInterceptor2Util.java:82)\\n\\tat com.google.net.rpc3.impl.server.AggregatedRpcServerInterceptors.interceptRpc(AggregatedRpcServerInterceptors.java:100)\\n\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\n\\tat com.google.prod.fireaxe.filters.FireaxeRpcServerInterceptorImpl.interceptRpc(FireaxeRpcServerInterceptorImpl.java:68)\\n\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\n\\tat com.google.cloud.helix.common.isolation.IsolationServerInterceptor.checkIsolationThrottlingAndContinueHandling(IsolationServerInterceptor.java:162)\\n\\tat com.google.cloud.helix.common.isolation.IsolationServerInterceptor.interceptRpc(IsolationServerInterceptor.java:113)\\n\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\n\\tat 
com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplicationWithCancellation(RpcServerInternalContext.java:693)\\n\\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.lambda$runRpcInApplication$0(RpcServerInternalContext.java:658)\\n\\tat io.grpc.Context.run(Context.java:536)\\n\\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplication(RpcServerInternalContext.java:658)\\n\\tat com.google.net.rpc3.impl.server.RpcServerChannel$4.apply(RpcServerChannel.java:1102)\\n\\tat com.google.net.rpc3.impl.server.RpcServerChannel$4.apply(RpcServerChannel.java:1060)\\n\\tat com.google.common.util.concurrent.AbstractTransformFuture$TransformFuture.doTransform(AbstractTransformFuture.java:259)\\n\\tat com.google.common.util.concurrent.AbstractTransformFuture$TransformFuture.doTransform(AbstractTransformFuture.java:249)\\n\\tat com.google.common.util.concurrent.AbstractTransformFuture.run(AbstractTransformFuture.java:130)\\n\\tat com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\\n\\tat com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:80)\\n\\tat com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\\n\\tat com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\\n\\tat io.grpc.Context.run(Context.java:536)\\n\\tat com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:78)\\n\\tat com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\\n\\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)\\n\\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)\\n\\tat java.base/java.lang.Thread.run(Unknown Source)\\n\\tSuppressed: [NOT_FOUND] message=com.google.net.rpc3.client.RpcClientException: APPLICATION_ERROR;cloud.helix/Storage.GetDataset;domain: \\\"cloud.helix.ErrorDomain\\\" code: \\\"NOT_FOUND\\\" argument: \\\"Dataset\\\" argument: \\\"lookerdata:cdc\\\" debug_info: \\\"[NOT_FOUND] message=Dataset lookerdata:cdc not found debug=time: 2024-10-15T19:04:52.652-07:00 errorProto=code: \\\\\\\"NOT_FOUND\\\\\\\"\\\\nargument: \\\\\\\"Dataset\\\\\\\"\\\\nargument: \\\\\\\"lookerdata:cdc\\\\\\\"\\\\n\\\\n\\\\tat com.google.cloud.helix.common.Exceptions$Public.resourceNotFound(Exceptions.java:292)\\\\n\\\\tat com.google.cloud.helix.common.Exceptions$Public.resourceNotFound(Exceptions.java:296)\\\\n\\\\tat com.google.cloud.helix.server.metadata.DatasetTrackerSpanner.lambda$getDatasetEntityAsync$0(DatasetTrackerSpanner.java:705)\\\\n\\\\tat com.google.common.util.concurrent.AbstractTransformFuture$AsyncTransformFuture.doTransform(AbstractTransformFuture.java:230)\\\\n\\\\tat com.google.common.util.concurrent.AbstractTransformFuture$AsyncTransformFuture.doTransform(AbstractTransformFuture.java:217)\\\\n\\\\tat com.google.common.util.concurrent.AbstractTransformFuture.run(AbstractTransformFuture.java:130)\\\\n\\\\tat com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\\\\n\\\\tat io.grpc.Context.run(Context.java:536)\\\\n\\\\tat com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:78)\\\\n\\\\tat com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\\\\n\\\\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)\\\\n\\\\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)\\\\n\\\\tat 
com.google.cloud.helix.common.ThreadPoolFactory.lambda$defaultThreadFactory$2(ThreadPoolFactory.java:143)\\\\n\\\\tat java.base/java.lang.Thread.run(Unknown Source)\\\\n\\\\tSuppressed: java.lang.Exception: Including call stack from HelixFutures\\\\n\\\\t\\\\tat com.google.cloud.helix.common.HelixFutures.getHelixException(HelixFutures.java:76)\\\\n\\\\t\\\\tat com.google.cloud.helix.common.HelixFutures.get(HelixFutures.java:42)\\\\n\\\\t\\\\tat com.google.cloud.helix.server.metadata.DatasetTracker.lookupDataset(DatasetTracker.java:152)\\\\n\\\\t\\\\tat com.google.cloud.helix.server.rosy.GetDatasetImpl.getDatasetInternal(GetDatasetImpl.java:60)\\\\n\\\\t\\\\tat com.google.cloud.helix.server.rosy.GetDatasetImpl.getDataset(GetDatasetImpl.java:38)\\\\n\\\\t\\\\tat com.google.cloud.helix.server.rosy.StorageStubby.getDataset(StorageStubby.java:504)\\\\n\\\\t\\\\tat com.google.cloud.helix.proto2.Storage$ServiceParameters$81.handleBlockingRequest(Storage.java:4539)\\\\n\\\\t\\\\tat com.google.cloud.helix.proto2.Storage$ServiceParameters$81.handleBlockingRequest(Storage.java:4537)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcBlockingApplicationHandler.handleRequest(RpcBlockingApplicationHandler.java:28)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$RpcApplicationHandlerAdaptor.handleRequest(RpcServerInterceptor2Util.java:82)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.AggregatedRpcServerInterceptors.interceptRpc(AggregatedRpcServerInterceptors.java:100)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\\\n\\\\t\\\\tat com.google.prod.fireaxe.filters.FireaxeRpcServerInterceptorImpl.interceptRpc(FireaxeRpcServerInterceptorImpl.java:68)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\\\n\\\\t\\\\tat com.google.cloud.helix.common.isolation.IsolationServerInterceptor.checkIsolationThrottlingAndContinueHandling(IsolationServerInterceptor.java:162)\\\\n\\\\t\\\\tat com.google.cloud.helix.common.isolation.IsolationServerInterceptor.interceptRpc(IsolationServerInterceptor.java:113)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplicationWithCancellation(RpcServerInternalContext.java:693)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.lambda$runRpcInApplication$0(RpcServerInternalContext.java:658)\\\\n\\\\t\\\\tat io.grpc.Context.run(Context.java:536)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplication(RpcServerInternalContext.java:658)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerChannel$4.apply(RpcServerChannel.java:1102)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerChannel$4.apply(RpcServerChannel.java:1060)\\\\n\\\\t\\\\tat com.google.common.util.concurrent.AbstractTransformFuture$TransformFuture.doTransform(AbstractTransformFuture.java:259)\\\\n\\\\t\\\\tat com.google.common.util.concurrent.AbstractTransformFuture$TransformFuture.doTransform(AbstractTransformFuture.java:249)\\\\n\\\\t\\\\tat com.google.common.util.concurrent.AbstractTransformFuture.run(AbstractTransformFuture.java:130)\\\\n\\\\t\\\\tat 
com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\\\\n\\\\t\\\\tat com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:80)\\\\n\\\\t\\\\tat com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\\\\n\\\\t\\\\tat com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\\\\n\\\\t\\\\tat io.grpc.Context.run(Context.java:536)\\\\n\\\\t\\\\tat com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:78)\\\\n\\\\t\\\\tat com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\\\\n\\\\t\\\\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)\\\\n\\\\t\\\\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)\\\\n\\\\t\\\\t... 1 more\\\\n\\\";AppErrorCode=2;StartTimeMs=1729044292646;unknown;ResFormat=uncompressed;ServerTimeSec=0.006606848;LogBytes=256;Non-FailFast;EffSecLevel=none;ReqFormat=uncompressed;ReqID=e282aacfbcda1ddc;GlobalID=0;Server=[2002:a05:6845:9603:b0:3d:63a8:3b9e]:4001 debug=time: 2024-10-15T19:04:52.652-07:00 errorProto=code: \\\"NOT_FOUND\\\"\\nargument: \\\"Dataset\\\"\\nargument: \\\"lookerdata:cdc\\\"\\n\\n\\t\\tat com.google.cloud.helix.server.common.StubbyUtil.createHelixException(StubbyUtil.java:229)\\n\\t\\tat com.google.cloud.helix.server.metadata.lookup.RpcDatasetMetadataLookup.resolveDataset(RpcDatasetMetadataLookup.java:82)\\n\\t\\tat com.google.cloud.helix.server.auth.QueryEntityReferenceConverter.normalizeIfNeeded(QueryEntityReferenceConverter.java:201)\\n\\t\\t... 36 more\\n\\tCaused by: com.google.net.rpc3.client.RpcClientException: APPLICATION_ERROR;cloud.helix/Storage.GetDataset;domain: \\\"cloud.helix.ErrorDomain\\\" code: \\\"NOT_FOUND\\\" argument: \\\"Dataset\\\" argument: \\\"lookerdata:cdc\\\" debug_info: \\\"[NOT_FOUND] message=Dataset lookerdata:cdc not found debug=time: 2024-10-15T19:04:52.652-07:00 errorProto=code: \\\\\\\"NOT_FOUND\\\\\\\"\\\\nargument: \\\\\\\"Dataset\\\\\\\"\\\\nargument: \\\\\\\"lookerdata:cdc\\\\\\\"\\\\n\\\\n\\\\tat com.google.cloud.helix.common.Exceptions$Public.resourceNotFound(Exceptions.java:292)\\\\n\\\\tat com.google.cloud.helix.common.Exceptions$Public.resourceNotFound(Exceptions.java:296)\\\\n\\\\tat com.google.cloud.helix.server.metadata.DatasetTrackerSpanner.lambda$getDatasetEntityAsync$0(DatasetTrackerSpanner.java:705)\\\\n\\\\tat com.google.common.util.concurrent.AbstractTransformFuture$AsyncTransformFuture.doTransform(AbstractTransformFuture.java:230)\\\\n\\\\tat com.google.common.util.concurrent.AbstractTransformFuture$AsyncTransformFuture.doTransform(AbstractTransformFuture.java:217)\\\\n\\\\tat com.google.common.util.concurrent.AbstractTransformFuture.run(AbstractTransformFuture.java:130)\\\\n\\\\tat com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\\\\n\\\\tat io.grpc.Context.run(Context.java:536)\\\\n\\\\tat com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:78)\\\\n\\\\tat com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\\\\n\\\\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)\\\\n\\\\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)\\\\n\\\\tat com.google.cloud.helix.common.ThreadPoolFactory.lambda$defaultThreadFactory$2(ThreadPoolFactory.java:143)\\\\n\\\\tat java.base/java.lang.Thread.run(Unknown Source)\\\\n\\\\tSuppressed: java.lang.Exception: 
Including call stack from HelixFutures\\\\n\\\\t\\\\tat com.google.cloud.helix.common.HelixFutures.getHelixException(HelixFutures.java:76)\\\\n\\\\t\\\\tat com.google.cloud.helix.common.HelixFutures.get(HelixFutures.java:42)\\\\n\\\\t\\\\tat com.google.cloud.helix.server.metadata.DatasetTracker.lookupDataset(DatasetTracker.java:152)\\\\n\\\\t\\\\tat com.google.cloud.helix.server.rosy.GetDatasetImpl.getDatasetInternal(GetDatasetImpl.java:60)\\\\n\\\\t\\\\tat com.google.cloud.helix.server.rosy.GetDatasetImpl.getDataset(GetDatasetImpl.java:38)\\\\n\\\\t\\\\tat com.google.cloud.helix.server.rosy.StorageStubby.getDataset(StorageStubby.java:504)\\\\n\\\\t\\\\tat com.google.cloud.helix.proto2.Storage$ServiceParameters$81.handleBlockingRequest(Storage.java:4539)\\\\n\\\\t\\\\tat com.google.cloud.helix.proto2.Storage$ServiceParameters$81.handleBlockingRequest(Storage.java:4537)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcBlockingApplicationHandler.handleRequest(RpcBlockingApplicationHandler.java:28)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$RpcApplicationHandlerAdaptor.handleRequest(RpcServerInterceptor2Util.java:82)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.AggregatedRpcServerInterceptors.interceptRpc(AggregatedRpcServerInterceptors.java:100)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\\\n\\\\t\\\\tat com.google.prod.fireaxe.filters.FireaxeRpcServerInterceptorImpl.interceptRpc(FireaxeRpcServerInterceptorImpl.java:68)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\\\n\\\\t\\\\tat com.google.cloud.helix.common.isolation.IsolationServerInterceptor.checkIsolationThrottlingAndContinueHandling(IsolationServerInterceptor.java:162)\\\\n\\\\t\\\\tat com.google.cloud.helix.common.isolation.IsolationServerInterceptor.interceptRpc(IsolationServerInterceptor.java:113)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplicationWithCancellation(RpcServerInternalContext.java:693)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.lambda$runRpcInApplication$0(RpcServerInternalContext.java:658)\\\\n\\\\t\\\\tat io.grpc.Context.run(Context.java:536)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplication(RpcServerInternalContext.java:658)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerChannel$4.apply(RpcServerChannel.java:1102)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerChannel$4.apply(RpcServerChannel.java:1060)\\\\n\\\\t\\\\tat com.google.common.util.concurrent.AbstractTransformFuture$TransformFuture.doTransform(AbstractTransformFuture.java:259)\\\\n\\\\t\\\\tat com.google.common.util.concurrent.AbstractTransformFuture$TransformFuture.doTransform(AbstractTransformFuture.java:249)\\\\n\\\\t\\\\tat com.google.common.util.concurrent.AbstractTransformFuture.run(AbstractTransformFuture.java:130)\\\\n\\\\t\\\\tat com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\\\\n\\\\t\\\\tat com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:80)\\\\n\\\\t\\\\tat 
com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\\\\n\\\\t\\\\tat com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\\\\n\\\\t\\\\tat io.grpc.Context.run(Context.java:536)\\\\n\\\\t\\\\tat com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:78)\\\\n\\\\t\\\\tat com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\\\\n\\\\t\\\\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)\\\\n\\\\t\\\\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)\\\\n\\\\t\\\\t... 1 more\\\\n\\\";AppErrorCode=2;StartTimeMs=1729044292646;unknown;ResFormat=uncompressed;ServerTimeSec=0.006606848;LogBytes=256;Non-FailFast;EffSecLevel=none;ReqFormat=uncompressed;ReqID=e282aacfbcda1ddc;GlobalID=0;Server=[2002:a05:6845:9603:b0:3d:63a8:3b9e]:4001\\n\\t\\tat com.google.net.rpc3.client.RpcStub.startBlockingRpcInternal(RpcStub.java:571)\\n\\t\\tat com.google.net.rpc3.client.RpcStub.startBlockingRpc(RpcStub.java:471)\\n\\t\\tat com.google.cloud.helix.proto2.Storage$Stub.getDataset(Storage.java:1349)\\n\\t\\tat com.google.cloud.helix.server.metadata.lookup.RpcDatasetMetadataLookup.resolveDataset(RpcDatasetMetadataLookup.java:55)\\n\\t\\t... 37 more\\n\\tSuppressed: [ACCESS_DENIED] debug=User 768386550392: IAM access denied errorProto=code: \\\"ACCESS_DENIED\\\"\\nargument: \\\"Dataset\\\"\\nargument: \\\"lookerdata:cdc\\\"\\nargument: \\\"Permission bigquery.datasets.get denied on dataset lookerdata:cdc (or it may not exist).\\\"\\n\\n\\t\\tat com.google.cloud.helix.common.Exceptions$Public.accessDenied(Exceptions.java:574)\\n\\t\\tat com.google.cloud.helix.common.auth.AuthorizerExceptions$Public.createPermissionDeny(AuthorizerExceptions.java:262)\\n\\t\\tat com.google.cloud.helix.common.auth.AuthorizerExceptions$Public.datasetAccessDenied(AuthorizerExceptions.java:156)\\n\\t\\tat com.google.cloud.helix.server.auth.IamAuthorizer.lambda$authorizeDatasetInternal$4(IamAuthorizer.java:1194)\\n\\t\\tat com.google.common.util.concurrent.AbstractTransformFuture$TransformFuture.doTransform(AbstractTransformFuture.java:259)\\n\\t\\tat com.google.common.util.concurrent.AbstractTransformFuture$TransformFuture.doTransform(AbstractTransformFuture.java:249)\\n\\t\\tat com.google.common.util.concurrent.AbstractTransformFuture.run(AbstractTransformFuture.java:130)\\n\\t\\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Unknown Source)\\n\\t\\tat com.google.common.util.concurrent.TrustedListenableFutureTask$TrustedFutureInterruptibleTask.runInterruptibly(TrustedListenableFutureTask.java:135)\\n\\t\\tat com.google.common.util.concurrent.InterruptibleTask.run(InterruptibleTask.java:77)\\n\\t\\tat com.google.common.util.concurrent.TrustedListenableFutureTask.run(TrustedListenableFutureTask.java:86)\\n\\t\\t... 
7 more\\n\";AppErrorCode=4;StartTimeMs=1729044292643;unknown;Deadline(sec)=60.0;ResFormat=uncompressed;ServerTimeSec=0.174171421;LogBytes=256;FailFast;EffSecLevel=privacy_and_integrity;ReqFormat=uncompressed;ReqID=1d5a3c9d34b06964;GlobalID=0;Server=[2002:a05:6845:6018:b0:3d:366e:f847]:4001\n\tat com.google.net.rpc3.client.RpcStub.startBlockingRpcInternal(RpcStub.java:571)\n\tat com.google.net.rpc3.client.RpcStub.startBlockingRpc(RpcStub.java:471)\n\tat com.google.cloud.helix.proto2.Authorization$Stub.authorizeQuery(Authorization.java:198)\n\tat com.google.cloud.helix.common.auth.client.AuthorizationServiceClientImpl.authorizeQuery(AuthorizationServiceClientImpl.java:332)\n\t... 16 more\n", + "message": "Access Denied: Table lookerdata:cdc.project_tycho_reports: User does not have permission to query table lookerdata:cdc.project_tycho_reports, or perhaps it does not exist." + }, + "errors": [ + { + "reason": "accessDenied", + "message": "Access Denied: Table lookerdata:cdc.project_tycho_reports: User does not have permission to query table lookerdata:cdc.project_tycho_reports, or perhaps it does not exist." + } + ], + "state": "DONE" + }, + "principal_subject": "user:xiaoweim@google.com", + "jobCreationReason": { + "code": "REQUESTED" + } + } + headers: + Content-Type: + - application/json; charset=UTF-8 + status: 200 OK + code: 200 + duration: 225.331399ms + - id: 13 + request: + proto: HTTP/1.1 + proto_major: 1 + proto_minor: 1 + content_length: 0 + transfer_encoding: [] + trailer: {} + host: bigquery.googleapis.com + remote_addr: "" + request_uri: "" + body: "" + form: {} + headers: + Content-Type: + - application/json + url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/jobs/bigqueryjob-2yq2ldf3wcoir?alt=json&location=us-central1 + method: GET + response: + proto: HTTP/2.0 + proto_major: 2 + proto_minor: 0 + transfer_encoding: [] + trailer: {} + content_length: -1 + uncompressed: true + body: | + { + "kind": "bigquery#job", + "etag": "qywbpQBNxG6Twvxjt/luNw==", + "id": "example-project:us-central1.bigqueryjob-2yq2ldf3wcoir", + "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/example-project/jobs/bigqueryjob-2yq2ldf3wcoir?location=us-central1", + "user_email": "xiaoweim@google.com", + "configuration": { + "query": { + "query": "SELECT state FROM [lookerdata:cdc.project_tycho_reports]", + "destinationTable": { + "projectId": "example-project", + "datasetId": "bigquerydataset22yq2ldf3wcoir", + "tableId": "bigquerytable2yq2ldf3wcoir" + }, + "createDisposition": "CREATE_NEVER", + "writeDisposition": "WRITE_APPEND", + "defaultDataset": { + "datasetId": "bigquerydataset12yq2ldf3wcoir", + "projectId": "example-project" + }, + "priority": "INTERACTIVE", + "allowLargeResults": true, + "useQueryCache": true, + "flattenResults": true, + "useLegacySql": true, + "schemaUpdateOptions": [ + "ALLOW_FIELD_ADDITION", + "ALLOW_FIELD_RELAXATION" + ], + "scriptOptions": { + "statementTimeoutMs": "300000", + "keyResultStatement": "LAST" + } + }, + "jobTimeoutMs": "600000", + "labels": { + "cnrm-test": "true", + "label-one": "value-one", + "managed-by-cnrm": "true" + }, + "jobType": "QUERY" + }, + "jobReference": { + "projectId": "example-project", + "jobId": "bigqueryjob-2yq2ldf3wcoir", + "location": "us-central1" }, - "createDisposition": "CREATE_NEVER", - "writeDisposition": "WRITE_APPEND", - "defaultDataset": { + "statistics": { + "creationTime": "1729044292505", + "startTime": "1729044292825", + "endTime": "1729044292825" + }, + "status": { + "errorResult": { + 
"reason": "accessDenied", + "debugInfo": "[ACCESS_DENIED] message=com.google.net.rpc3.client.RpcClientException: APPLICATION_ERROR;cloud.helix/Authorization.AuthorizeQuery;domain: \"cloud.helix.ErrorDomain\" code: \"ACCESS_DENIED\" argument: \"Table\" argument: \"lookerdata:cdc.project_tycho_reports\" argument: \"User does not have permission to query table lookerdata:cdc.project_tycho_reports, or perhaps it does not exist.\" debug_info: \"[ACCESS_DENIED] errorProto=code: \\\"ACCESS_DENIED\\\"\\nargument: \\\"Table\\\"\\nargument: \\\"lookerdata:cdc.project_tycho_reports\\\"\\nargument: \\\"User does not have permission to query table lookerdata:cdc.project_tycho_reports, or perhaps it does not exist.\\\"\\n\\n\\tat com.google.cloud.helix.common.Exceptions$Public.accessDenied(Exceptions.java:574)\\n\\tat com.google.cloud.helix.common.Exceptions$Public.accessDenied(Exceptions.java:615)\\n\\tat com.google.cloud.helix.common.Exceptions$Public.accessDenied(Exceptions.java:619)\\n\\tat com.google.cloud.helix.common.UserTableReference.buildAccessDeniedException(UserTableReference.java:79)\\n\\tat com.google.cloud.helix.server.auth.QueryEntityReferenceConverter.authorizeMissingDataset(QueryEntityReferenceConverter.java:257)\\n\\tat com.google.cloud.helix.server.auth.QueryEntityReferenceConverter.normalizeIfNeeded(QueryEntityReferenceConverter.java:206)\\n\\tat com.google.cloud.helix.server.auth.QueryEntityReferenceConverter.convertTable(QueryEntityReferenceConverter.java:117)\\n\\tat com.google.cloud.helix.server.auth.QueryEntityReferenceConverter.convert(QueryEntityReferenceConverter.java:93)\\n\\tat com.google.cloud.helix.server.auth.service.AuthorizationService.lambda$authorizeQuery$1(AuthorizationService.java:444)\\n\\tat com.google.cloud.helix.server.common.TreeNode.transform(TreeNode.java:94)\\n\\tat com.google.cloud.helix.server.common.TreeNode.transform(TreeNode.java:96)\\n\\tat com.google.cloud.helix.server.auth.service.AuthorizationService.lambda$authorizeQuery$0(AuthorizationService.java:444)\\n\\tat com.google.cloud.helix.server.auth.service.AuthorizationService.runWithSecurityContext(AuthorizationService.java:1082)\\n\\tat com.google.cloud.helix.server.auth.service.AuthorizationService.authorizeQuery(AuthorizationService.java:395)\\n\\tat com.google.cloud.helix.proto2.Authorization$ServiceParameters$2.handleRequest(Authorization.java:511)\\n\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$RpcApplicationHandlerAdaptor.handleRequest(RpcServerInterceptor2Util.java:82)\\n\\tat com.google.net.rpc3.impl.server.AggregatedRpcServerInterceptors.interceptRpc(AggregatedRpcServerInterceptors.java:100)\\n\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\n\\tat com.google.prod.fireaxe.filters.FireaxeRpcServerInterceptorImpl.interceptRpc(FireaxeRpcServerInterceptorImpl.java:68)\\n\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\n\\tat com.google.cloud.helix.common.isolation.IsolationServerInterceptor.checkIsolationThrottlingAndContinueHandling(IsolationServerInterceptor.java:162)\\n\\tat com.google.cloud.helix.common.isolation.IsolationServerInterceptor.interceptRpc(IsolationServerInterceptor.java:113)\\n\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\n\\tat 
com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplicationWithCancellation(RpcServerInternalContext.java:693)\\n\\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.lambda$runRpcInApplication$0(RpcServerInternalContext.java:658)\\n\\tat io.grpc.Context.run(Context.java:536)\\n\\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplication(RpcServerInternalContext.java:658)\\n\\tat com.google.net.rpc3.impl.server.RpcServerChannel$4.apply(RpcServerChannel.java:1102)\\n\\tat com.google.net.rpc3.impl.server.RpcServerChannel$4.apply(RpcServerChannel.java:1060)\\n\\tat com.google.common.util.concurrent.AbstractTransformFuture$TransformFuture.doTransform(AbstractTransformFuture.java:259)\\n\\tat com.google.common.util.concurrent.AbstractTransformFuture$TransformFuture.doTransform(AbstractTransformFuture.java:249)\\n\\tat com.google.common.util.concurrent.AbstractTransformFuture.run(AbstractTransformFuture.java:130)\\n\\tat com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\\n\\tat com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:80)\\n\\tat com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\\n\\tat com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\\n\\tat io.grpc.Context.run(Context.java:536)\\n\\tat com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:78)\\n\\tat com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\\n\\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)\\n\\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)\\n\\tat java.base/java.lang.Thread.run(Unknown Source)\\n\\tSuppressed: [NOT_FOUND] message=com.google.net.rpc3.client.RpcClientException: APPLICATION_ERROR;cloud.helix/Storage.GetDataset;domain: \\\"cloud.helix.ErrorDomain\\\" code: \\\"NOT_FOUND\\\" argument: \\\"Dataset\\\" argument: \\\"lookerdata:cdc\\\" debug_info: \\\"[NOT_FOUND] message=Dataset lookerdata:cdc not found debug=time: 2024-10-15T19:04:52.652-07:00 errorProto=code: \\\\\\\"NOT_FOUND\\\\\\\"\\\\nargument: \\\\\\\"Dataset\\\\\\\"\\\\nargument: \\\\\\\"lookerdata:cdc\\\\\\\"\\\\n\\\\n\\\\tat com.google.cloud.helix.common.Exceptions$Public.resourceNotFound(Exceptions.java:292)\\\\n\\\\tat com.google.cloud.helix.common.Exceptions$Public.resourceNotFound(Exceptions.java:296)\\\\n\\\\tat com.google.cloud.helix.server.metadata.DatasetTrackerSpanner.lambda$getDatasetEntityAsync$0(DatasetTrackerSpanner.java:705)\\\\n\\\\tat com.google.common.util.concurrent.AbstractTransformFuture$AsyncTransformFuture.doTransform(AbstractTransformFuture.java:230)\\\\n\\\\tat com.google.common.util.concurrent.AbstractTransformFuture$AsyncTransformFuture.doTransform(AbstractTransformFuture.java:217)\\\\n\\\\tat com.google.common.util.concurrent.AbstractTransformFuture.run(AbstractTransformFuture.java:130)\\\\n\\\\tat com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\\\\n\\\\tat io.grpc.Context.run(Context.java:536)\\\\n\\\\tat com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:78)\\\\n\\\\tat com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\\\\n\\\\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)\\\\n\\\\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)\\\\n\\\\tat 
com.google.cloud.helix.common.ThreadPoolFactory.lambda$defaultThreadFactory$2(ThreadPoolFactory.java:143)\\\\n\\\\tat java.base/java.lang.Thread.run(Unknown Source)\\\\n\\\\tSuppressed: java.lang.Exception: Including call stack from HelixFutures\\\\n\\\\t\\\\tat com.google.cloud.helix.common.HelixFutures.getHelixException(HelixFutures.java:76)\\\\n\\\\t\\\\tat com.google.cloud.helix.common.HelixFutures.get(HelixFutures.java:42)\\\\n\\\\t\\\\tat com.google.cloud.helix.server.metadata.DatasetTracker.lookupDataset(DatasetTracker.java:152)\\\\n\\\\t\\\\tat com.google.cloud.helix.server.rosy.GetDatasetImpl.getDatasetInternal(GetDatasetImpl.java:60)\\\\n\\\\t\\\\tat com.google.cloud.helix.server.rosy.GetDatasetImpl.getDataset(GetDatasetImpl.java:38)\\\\n\\\\t\\\\tat com.google.cloud.helix.server.rosy.StorageStubby.getDataset(StorageStubby.java:504)\\\\n\\\\t\\\\tat com.google.cloud.helix.proto2.Storage$ServiceParameters$81.handleBlockingRequest(Storage.java:4539)\\\\n\\\\t\\\\tat com.google.cloud.helix.proto2.Storage$ServiceParameters$81.handleBlockingRequest(Storage.java:4537)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcBlockingApplicationHandler.handleRequest(RpcBlockingApplicationHandler.java:28)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$RpcApplicationHandlerAdaptor.handleRequest(RpcServerInterceptor2Util.java:82)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.AggregatedRpcServerInterceptors.interceptRpc(AggregatedRpcServerInterceptors.java:100)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\\\n\\\\t\\\\tat com.google.prod.fireaxe.filters.FireaxeRpcServerInterceptorImpl.interceptRpc(FireaxeRpcServerInterceptorImpl.java:68)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\\\n\\\\t\\\\tat com.google.cloud.helix.common.isolation.IsolationServerInterceptor.checkIsolationThrottlingAndContinueHandling(IsolationServerInterceptor.java:162)\\\\n\\\\t\\\\tat com.google.cloud.helix.common.isolation.IsolationServerInterceptor.interceptRpc(IsolationServerInterceptor.java:113)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplicationWithCancellation(RpcServerInternalContext.java:693)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.lambda$runRpcInApplication$0(RpcServerInternalContext.java:658)\\\\n\\\\t\\\\tat io.grpc.Context.run(Context.java:536)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplication(RpcServerInternalContext.java:658)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerChannel$4.apply(RpcServerChannel.java:1102)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerChannel$4.apply(RpcServerChannel.java:1060)\\\\n\\\\t\\\\tat com.google.common.util.concurrent.AbstractTransformFuture$TransformFuture.doTransform(AbstractTransformFuture.java:259)\\\\n\\\\t\\\\tat com.google.common.util.concurrent.AbstractTransformFuture$TransformFuture.doTransform(AbstractTransformFuture.java:249)\\\\n\\\\t\\\\tat com.google.common.util.concurrent.AbstractTransformFuture.run(AbstractTransformFuture.java:130)\\\\n\\\\t\\\\tat 
com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\\\\n\\\\t\\\\tat com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:80)\\\\n\\\\t\\\\tat com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\\\\n\\\\t\\\\tat com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\\\\n\\\\t\\\\tat io.grpc.Context.run(Context.java:536)\\\\n\\\\t\\\\tat com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:78)\\\\n\\\\t\\\\tat com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\\\\n\\\\t\\\\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)\\\\n\\\\t\\\\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)\\\\n\\\\t\\\\t... 1 more\\\\n\\\";AppErrorCode=2;StartTimeMs=1729044292646;unknown;ResFormat=uncompressed;ServerTimeSec=0.006606848;LogBytes=256;Non-FailFast;EffSecLevel=none;ReqFormat=uncompressed;ReqID=e282aacfbcda1ddc;GlobalID=0;Server=[2002:a05:6845:9603:b0:3d:63a8:3b9e]:4001 debug=time: 2024-10-15T19:04:52.652-07:00 errorProto=code: \\\"NOT_FOUND\\\"\\nargument: \\\"Dataset\\\"\\nargument: \\\"lookerdata:cdc\\\"\\n\\n\\t\\tat com.google.cloud.helix.server.common.StubbyUtil.createHelixException(StubbyUtil.java:229)\\n\\t\\tat com.google.cloud.helix.server.metadata.lookup.RpcDatasetMetadataLookup.resolveDataset(RpcDatasetMetadataLookup.java:82)\\n\\t\\tat com.google.cloud.helix.server.auth.QueryEntityReferenceConverter.normalizeIfNeeded(QueryEntityReferenceConverter.java:201)\\n\\t\\t... 36 more\\n\\tCaused by: com.google.net.rpc3.client.RpcClientException: APPLICATION_ERROR;cloud.helix/Storage.GetDataset;domain: \\\"cloud.helix.ErrorDomain\\\" code: \\\"NOT_FOUND\\\" argument: \\\"Dataset\\\" argument: \\\"lookerdata:cdc\\\" debug_info: \\\"[NOT_FOUND] message=Dataset lookerdata:cdc not found debug=time: 2024-10-15T19:04:52.652-07:00 errorProto=code: \\\\\\\"NOT_FOUND\\\\\\\"\\\\nargument: \\\\\\\"Dataset\\\\\\\"\\\\nargument: \\\\\\\"lookerdata:cdc\\\\\\\"\\\\n\\\\n\\\\tat com.google.cloud.helix.common.Exceptions$Public.resourceNotFound(Exceptions.java:292)\\\\n\\\\tat com.google.cloud.helix.common.Exceptions$Public.resourceNotFound(Exceptions.java:296)\\\\n\\\\tat com.google.cloud.helix.server.metadata.DatasetTrackerSpanner.lambda$getDatasetEntityAsync$0(DatasetTrackerSpanner.java:705)\\\\n\\\\tat com.google.common.util.concurrent.AbstractTransformFuture$AsyncTransformFuture.doTransform(AbstractTransformFuture.java:230)\\\\n\\\\tat com.google.common.util.concurrent.AbstractTransformFuture$AsyncTransformFuture.doTransform(AbstractTransformFuture.java:217)\\\\n\\\\tat com.google.common.util.concurrent.AbstractTransformFuture.run(AbstractTransformFuture.java:130)\\\\n\\\\tat com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\\\\n\\\\tat io.grpc.Context.run(Context.java:536)\\\\n\\\\tat com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:78)\\\\n\\\\tat com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\\\\n\\\\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)\\\\n\\\\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)\\\\n\\\\tat com.google.cloud.helix.common.ThreadPoolFactory.lambda$defaultThreadFactory$2(ThreadPoolFactory.java:143)\\\\n\\\\tat java.base/java.lang.Thread.run(Unknown Source)\\\\n\\\\tSuppressed: java.lang.Exception: 
Including call stack from HelixFutures\\\\n\\\\t\\\\tat com.google.cloud.helix.common.HelixFutures.getHelixException(HelixFutures.java:76)\\\\n\\\\t\\\\tat com.google.cloud.helix.common.HelixFutures.get(HelixFutures.java:42)\\\\n\\\\t\\\\tat com.google.cloud.helix.server.metadata.DatasetTracker.lookupDataset(DatasetTracker.java:152)\\\\n\\\\t\\\\tat com.google.cloud.helix.server.rosy.GetDatasetImpl.getDatasetInternal(GetDatasetImpl.java:60)\\\\n\\\\t\\\\tat com.google.cloud.helix.server.rosy.GetDatasetImpl.getDataset(GetDatasetImpl.java:38)\\\\n\\\\t\\\\tat com.google.cloud.helix.server.rosy.StorageStubby.getDataset(StorageStubby.java:504)\\\\n\\\\t\\\\tat com.google.cloud.helix.proto2.Storage$ServiceParameters$81.handleBlockingRequest(Storage.java:4539)\\\\n\\\\t\\\\tat com.google.cloud.helix.proto2.Storage$ServiceParameters$81.handleBlockingRequest(Storage.java:4537)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcBlockingApplicationHandler.handleRequest(RpcBlockingApplicationHandler.java:28)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$RpcApplicationHandlerAdaptor.handleRequest(RpcServerInterceptor2Util.java:82)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.AggregatedRpcServerInterceptors.interceptRpc(AggregatedRpcServerInterceptors.java:100)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\\\n\\\\t\\\\tat com.google.prod.fireaxe.filters.FireaxeRpcServerInterceptorImpl.interceptRpc(FireaxeRpcServerInterceptorImpl.java:68)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\\\n\\\\t\\\\tat com.google.cloud.helix.common.isolation.IsolationServerInterceptor.checkIsolationThrottlingAndContinueHandling(IsolationServerInterceptor.java:162)\\\\n\\\\t\\\\tat com.google.cloud.helix.common.isolation.IsolationServerInterceptor.interceptRpc(IsolationServerInterceptor.java:113)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplicationWithCancellation(RpcServerInternalContext.java:693)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.lambda$runRpcInApplication$0(RpcServerInternalContext.java:658)\\\\n\\\\t\\\\tat io.grpc.Context.run(Context.java:536)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplication(RpcServerInternalContext.java:658)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerChannel$4.apply(RpcServerChannel.java:1102)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerChannel$4.apply(RpcServerChannel.java:1060)\\\\n\\\\t\\\\tat com.google.common.util.concurrent.AbstractTransformFuture$TransformFuture.doTransform(AbstractTransformFuture.java:259)\\\\n\\\\t\\\\tat com.google.common.util.concurrent.AbstractTransformFuture$TransformFuture.doTransform(AbstractTransformFuture.java:249)\\\\n\\\\t\\\\tat com.google.common.util.concurrent.AbstractTransformFuture.run(AbstractTransformFuture.java:130)\\\\n\\\\t\\\\tat com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\\\\n\\\\t\\\\tat com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:80)\\\\n\\\\t\\\\tat 
com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\\\\n\\\\t\\\\tat com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\\\\n\\\\t\\\\tat io.grpc.Context.run(Context.java:536)\\\\n\\\\t\\\\tat com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:78)\\\\n\\\\t\\\\tat com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\\\\n\\\\t\\\\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)\\\\n\\\\t\\\\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)\\\\n\\\\t\\\\t... 1 more\\\\n\\\";AppErrorCode=2;StartTimeMs=1729044292646;unknown;ResFormat=uncompressed;ServerTimeSec=0.006606848;LogBytes=256;Non-FailFast;EffSecLevel=none;ReqFormat=uncompressed;ReqID=e282aacfbcda1ddc;GlobalID=0;Server=[2002:a05:6845:9603:b0:3d:63a8:3b9e]:4001\\n\\t\\tat com.google.net.rpc3.client.RpcStub.startBlockingRpcInternal(RpcStub.java:571)\\n\\t\\tat com.google.net.rpc3.client.RpcStub.startBlockingRpc(RpcStub.java:471)\\n\\t\\tat com.google.cloud.helix.proto2.Storage$Stub.getDataset(Storage.java:1349)\\n\\t\\tat com.google.cloud.helix.server.metadata.lookup.RpcDatasetMetadataLookup.resolveDataset(RpcDatasetMetadataLookup.java:55)\\n\\t\\t... 37 more\\n\\tSuppressed: [ACCESS_DENIED] debug=User 768386550392: IAM access denied errorProto=code: \\\"ACCESS_DENIED\\\"\\nargument: \\\"Dataset\\\"\\nargument: \\\"lookerdata:cdc\\\"\\nargument: \\\"Permission bigquery.datasets.get denied on dataset lookerdata:cdc (or it may not exist).\\\"\\n\\n\\t\\tat com.google.cloud.helix.common.Exceptions$Public.accessDenied(Exceptions.java:574)\\n\\t\\tat com.google.cloud.helix.common.auth.AuthorizerExceptions$Public.createPermissionDeny(AuthorizerExceptions.java:262)\\n\\t\\tat com.google.cloud.helix.common.auth.AuthorizerExceptions$Public.datasetAccessDenied(AuthorizerExceptions.java:156)\\n\\t\\tat com.google.cloud.helix.server.auth.IamAuthorizer.lambda$authorizeDatasetInternal$4(IamAuthorizer.java:1194)\\n\\t\\tat com.google.common.util.concurrent.AbstractTransformFuture$TransformFuture.doTransform(AbstractTransformFuture.java:259)\\n\\t\\tat com.google.common.util.concurrent.AbstractTransformFuture$TransformFuture.doTransform(AbstractTransformFuture.java:249)\\n\\t\\tat com.google.common.util.concurrent.AbstractTransformFuture.run(AbstractTransformFuture.java:130)\\n\\t\\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Unknown Source)\\n\\t\\tat com.google.common.util.concurrent.TrustedListenableFutureTask$TrustedFutureInterruptibleTask.runInterruptibly(TrustedListenableFutureTask.java:135)\\n\\t\\tat com.google.common.util.concurrent.InterruptibleTask.run(InterruptibleTask.java:77)\\n\\t\\tat com.google.common.util.concurrent.TrustedListenableFutureTask.run(TrustedListenableFutureTask.java:86)\\n\\t\\t... 
7 more\\n\";AppErrorCode=4;StartTimeMs=1729044292643;unknown;Deadline(sec)=60.0;ResFormat=uncompressed;ServerTimeSec=0.174171421;LogBytes=256;FailFast;EffSecLevel=privacy_and_integrity;ReqFormat=uncompressed;ReqID=1d5a3c9d34b06964;GlobalID=0;Server=[2002:a05:6845:6018:b0:3d:366e:f847]:4001 errorProto=code: \"ACCESS_DENIED\"\nargument: \"Table\"\nargument: \"lookerdata:cdc.project_tycho_reports\"\nargument: \"User does not have permission to query table lookerdata:cdc.project_tycho_reports, or perhaps it does not exist.\"\n\n\tat com.google.cloud.helix.server.common.StubbyUtil.createHelixException(StubbyUtil.java:229)\n\tat com.google.cloud.helix.common.auth.client.AuthorizationServiceClientImpl.authorizeQuery(AuthorizationServiceClientImpl.java:350)\n\tat com.google.cloud.helix.server.auth.service.authorizers.QueryAuthorizer.callAuthServer(QueryAuthorizer.java:415)\n\tat com.google.cloud.helix.server.auth.service.authorizers.QueryAuthorizer.lambda$authorizeQuery$2(QueryAuthorizer.java:183)\n\tat com.google.common.util.concurrent.TrustedListenableFutureTask$TrustedFutureInterruptibleTask.runInterruptibly(TrustedListenableFutureTask.java:135)\n\tat com.google.common.util.concurrent.InterruptibleTask.run(InterruptibleTask.java:77)\n\tat com.google.common.util.concurrent.TrustedListenableFutureTask.run(TrustedListenableFutureTask.java:86)\n\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Unknown Source)\n\tat com.google.common.util.concurrent.TrustedListenableFutureTask$TrustedFutureInterruptibleTask.runInterruptibly(TrustedListenableFutureTask.java:135)\n\tat com.google.common.util.concurrent.InterruptibleTask.run(InterruptibleTask.java:77)\n\tat com.google.common.util.concurrent.TrustedListenableFutureTask.run(TrustedListenableFutureTask.java:86)\n\tat com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\n\tat io.grpc.Context.run(Context.java:536)\n\tat com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:78)\n\tat com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\n\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)\n\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)\n\tat java.base/java.lang.Thread.run(Unknown Source)\n\tSuppressed: java.lang.Exception: Including call stack from HelixFutures\n\t\tat com.google.cloud.helix.common.HelixFutures.getHelixException(HelixFutures.java:76)\n\t\tat com.google.cloud.helix.common.HelixFutures.get(HelixFutures.java:42)\n\t\tat com.google.cloud.helix.server.auth.service.authorizers.QueryAuthorizer.authorizeQuery(QueryAuthorizer.java:194)\n\t\tat com.google.cloud.helix.server.job.ReferenceCollector.authorizeEntitiesRead(ReferenceCollector.java:1640)\n\t\tat com.google.cloud.helix.server.job.ReferenceCollector.authorizeEntitiesRead(ReferenceCollector.java:1626)\n\t\tat com.google.cloud.helix.server.job.ReferenceCollector.getReferenceTree(ReferenceCollector.java:575)\n\t\tat com.google.cloud.helix.server.job.CatalogMetadataResolver.resolve(CatalogMetadataResolver.java:150)\n\t\tat com.google.cloud.helix.server.job.QueryAnalyzer.addTables(QueryAnalyzer.java:948)\n\t\tat com.google.cloud.helix.server.job.QueryAnalyzer.parseReferencedTables(QueryAnalyzer.java:4298)\n\t\tat com.google.cloud.helix.server.job.QueryAnalyzer.buildQueryInfo(QueryAnalyzer.java:3603)\n\t\tat com.google.cloud.helix.server.job.LocalQueryJobController.loadQueryInfoInternal(LocalQueryJobController.java:4014)\n\t\tat 
com.google.cloud.helix.server.job.LocalQueryJobController.loadQueryInfo(LocalQueryJobController.java:4089)\n\t\tat com.google.cloud.helix.server.job.LocalQueryJobController.checkInternal(LocalQueryJobController.java:4704)\n\t\tat com.google.cloud.helix.server.job.LocalQueryJobController.checkAsync(LocalQueryJobController.java:4620)\n\t\tat com.google.cloud.helix.server.job.LocalSqlJobController.checkAsync(LocalSqlJobController.java:129)\n\t\tat com.google.cloud.helix.server.job.LocalJobController.check(LocalJobController.java:1503)\n\t\tat com.google.cloud.helix.server.job.JobControllerModule$1.check(JobControllerModule.java:831)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine$1.check(JobStateMachine.java:3794)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.checkInternal(JobStateMachine.java:3063)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.lambda$runJob$8(JobStateMachine.java:2713)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.runState(JobStateMachine.java:2941)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.runJob(JobStateMachine.java:2713)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.execute(JobStateMachine.java:2602)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insertNormalizedJob(HelixJobRosy.java:1728)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insertJobInternalWithoutQuota(HelixJobRosy.java:2380)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insertJobInternal(HelixJobRosy.java:2270)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insertInternal(HelixJobRosy.java:2409)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insertRequestInternal(HelixJobRosy.java:3909)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insert(HelixJobRosy.java:3880)\n\t\tat java.base/jdk.internal.reflect.DirectMethodHandleAccessor.invoke(Unknown Source)\n\t\tat java.base/java.lang.reflect.Method.invoke(Unknown Source)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$innerContinuation$3(RpcRequestProxy.java:435)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestDapperHookFactory$TracingRequestHook.call(RosyRequestDapperHookFactory.java:88)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestCredsHookFactory$1.call(RosyRequestCredsHookFactory.java:56)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestConcurrentCallsHookFactory$Hook.call(RosyRequestConcurrentCallsHookFactory.java:101)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestVarzHookFactory$Hook.call(RosyRequestVarzHookFactory.java:464)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestAuditHookFactory$1.call(RosyRequestAuditHookFactory.java:107)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RequestSecurityExtensionForGwsHookFactory$1.call(RequestSecurityExtensionForGwsHookFactory.java:69)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat 
com.google.cloud.helix.common.rosy.RosyRequestSecurityContextHookFactory$1.call(RosyRequestSecurityContextHookFactory.java:83)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestContextHookFactory.call(RosyRequestContextHookFactory.java:63)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.oneplatform.common.OnePlatformRosyHookModule$MendelIdLogger$1.call(OnePlatformRosyHookModule.java:123)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.invoke(RpcRequestProxy.java:666)\n\t\tat jdk.proxy2/jdk.proxy2.$Proxy52.insert(Unknown Source)\n\t\tat com.google.cloud.helix.proto.proto2api.HelixJobService$ServiceParameters$1.handleRequest(HelixJobService.java:917)\n\t\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$RpcApplicationHandlerAdaptor.handleRequest(RpcServerInterceptor2Util.java:82)\n\t\tat com.google.net.rpc3.impl.server.AggregatedRpcServerInterceptors.interceptRpc(AggregatedRpcServerInterceptors.java:100)\n\t\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\n\t\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplicationWithCancellation(RpcServerInternalContext.java:693)\n\t\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.lambda$runRpcInApplication$0(RpcServerInternalContext.java:658)\n\t\tat io.grpc.Context.run(Context.java:536)\n\t\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplication(RpcServerInternalContext.java:658)\n\t\tat com.google.net.rpc3.util.RpcInProcessConnector$ServerInternalContext.lambda$runWithExecutor$1(RpcInProcessConnector.java:1995)\n\t\t... 
7 more\n\tSuppressed: [NOT_FOUND] message=Dataset lookerdata:cdc not found debug=time: 2024-10-15T19:04:52.643-07:00 errorProto=code: \"NOT_FOUND\"\nargument: \"Dataset\"\nargument: \"lookerdata:cdc\"\n\n\t\tat com.google.cloud.helix.server.job.CrossRegionDatasetResolver.resolve(CrossRegionDatasetResolver.java:162)\n\t\tat com.google.cloud.helix.server.job.ReferenceCollector.addTablesAndRoutinesToCache(ReferenceCollector.java:1380)\n\t\tat com.google.cloud.helix.server.job.ReferenceCollector.getReferenceTree(ReferenceCollector.java:560)\n\t\tat com.google.cloud.helix.server.job.CatalogMetadataResolver.resolve(CatalogMetadataResolver.java:150)\n\t\tat com.google.cloud.helix.server.job.QueryAnalyzer.addTables(QueryAnalyzer.java:948)\n\t\tat com.google.cloud.helix.server.job.QueryAnalyzer.parseReferencedTables(QueryAnalyzer.java:4298)\n\t\tat com.google.cloud.helix.server.job.QueryAnalyzer.buildQueryInfo(QueryAnalyzer.java:3603)\n\t\tat com.google.cloud.helix.server.job.LocalQueryJobController.loadQueryInfoInternal(LocalQueryJobController.java:4014)\n\t\tat com.google.cloud.helix.server.job.LocalQueryJobController.loadQueryInfo(LocalQueryJobController.java:4089)\n\t\tat com.google.cloud.helix.server.job.LocalQueryJobController.checkInternal(LocalQueryJobController.java:4704)\n\t\tat com.google.cloud.helix.server.job.LocalQueryJobController.checkAsync(LocalQueryJobController.java:4620)\n\t\tat com.google.cloud.helix.server.job.LocalSqlJobController.checkAsync(LocalSqlJobController.java:129)\n\t\tat com.google.cloud.helix.server.job.LocalJobController.check(LocalJobController.java:1503)\n\t\tat com.google.cloud.helix.server.job.JobControllerModule$1.check(JobControllerModule.java:831)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine$1.check(JobStateMachine.java:3794)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.checkInternal(JobStateMachine.java:3063)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.lambda$runJob$8(JobStateMachine.java:2713)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.runState(JobStateMachine.java:2941)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.runJob(JobStateMachine.java:2713)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.execute(JobStateMachine.java:2602)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insertNormalizedJob(HelixJobRosy.java:1728)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insertJobInternalWithoutQuota(HelixJobRosy.java:2380)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insertJobInternal(HelixJobRosy.java:2270)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insertInternal(HelixJobRosy.java:2409)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insertRequestInternal(HelixJobRosy.java:3909)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insert(HelixJobRosy.java:3880)\n\t\tat java.base/jdk.internal.reflect.DirectMethodHandleAccessor.invoke(Unknown Source)\n\t\tat java.base/java.lang.reflect.Method.invoke(Unknown Source)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$innerContinuation$3(RpcRequestProxy.java:435)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestDapperHookFactory$TracingRequestHook.call(RosyRequestDapperHookFactory.java:88)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestCredsHookFactory$1.call(RosyRequestCredsHookFactory.java:56)\n\t\tat 
com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestConcurrentCallsHookFactory$Hook.call(RosyRequestConcurrentCallsHookFactory.java:101)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestVarzHookFactory$Hook.call(RosyRequestVarzHookFactory.java:464)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestAuditHookFactory$1.call(RosyRequestAuditHookFactory.java:107)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RequestSecurityExtensionForGwsHookFactory$1.call(RequestSecurityExtensionForGwsHookFactory.java:69)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestSecurityContextHookFactory$1.call(RosyRequestSecurityContextHookFactory.java:83)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestContextHookFactory.call(RosyRequestContextHookFactory.java:63)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.oneplatform.common.OnePlatformRosyHookModule$MendelIdLogger$1.call(OnePlatformRosyHookModule.java:123)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.invoke(RpcRequestProxy.java:666)\n\t\tat jdk.proxy2/jdk.proxy2.$Proxy52.insert(Unknown Source)\n\t\tat com.google.cloud.helix.proto.proto2api.HelixJobService$ServiceParameters$1.handleRequest(HelixJobService.java:917)\n\t\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$RpcApplicationHandlerAdaptor.handleRequest(RpcServerInterceptor2Util.java:82)\n\t\tat com.google.net.rpc3.impl.server.AggregatedRpcServerInterceptors.interceptRpc(AggregatedRpcServerInterceptors.java:100)\n\t\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\n\t\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplicationWithCancellation(RpcServerInternalContext.java:693)\n\t\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.lambda$runRpcInApplication$0(RpcServerInternalContext.java:658)\n\t\tat io.grpc.Context.run(Context.java:536)\n\t\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplication(RpcServerInternalContext.java:658)\n\t\tat com.google.net.rpc3.util.RpcInProcessConnector$ServerInternalContext.lambda$runWithExecutor$1(RpcInProcessConnector.java:1995)\n\t\t... 
7 more\n\tSuppressed: java.lang.Exception: Including call stack from HelixFutures\n\t\tat com.google.cloud.helix.common.HelixFutures.getHelixException(HelixFutures.java:76)\n\t\tat com.google.cloud.helix.common.HelixFutures.getDone(HelixFutures.java:55)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.handleCheckDone(JobStateMachine.java:3088)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.handleCheckDoneInSpan(JobStateMachine.java:3077)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.lambda$checkInternal$16(JobStateMachine.java:3067)\n\t\tat com.google.common.util.concurrent.CombinedFuture$CallableInterruptibleTask.runInterruptibly(CombinedFuture.java:198)\n\t\tat com.google.common.util.concurrent.InterruptibleTask.run(InterruptibleTask.java:77)\n\t\tat com.google.common.util.concurrent.DirectExecutor.execute(DirectExecutor.java:32)\n\t\tat com.google.common.util.concurrent.CombinedFuture$CombinedFutureInterruptibleTask.execute(CombinedFuture.java:110)\n\t\tat com.google.common.util.concurrent.CombinedFuture.handleAllCompleted(CombinedFuture.java:67)\n\t\tat com.google.common.util.concurrent.AggregateFuture.processCompleted(AggregateFuture.java:317)\n\t\tat com.google.common.util.concurrent.AggregateFuture.decrementCountAndMaybeComplete(AggregateFuture.java:299)\n\t\tat com.google.common.util.concurrent.AggregateFuture.init(AggregateFuture.java:174)\n\t\tat com.google.common.util.concurrent.CombinedFuture.\u003cinit\u003e(CombinedFuture.java:57)\n\t\tat com.google.common.util.concurrent.Futures$FutureCombiner.call(Futures.java:883)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.checkInternal(JobStateMachine.java:3066)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.lambda$runJob$8(JobStateMachine.java:2713)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.runState(JobStateMachine.java:2941)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.runJob(JobStateMachine.java:2713)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.execute(JobStateMachine.java:2602)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insertNormalizedJob(HelixJobRosy.java:1728)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insertJobInternalWithoutQuota(HelixJobRosy.java:2380)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insertJobInternal(HelixJobRosy.java:2270)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insertInternal(HelixJobRosy.java:2409)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insertRequestInternal(HelixJobRosy.java:3909)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insert(HelixJobRosy.java:3880)\n\t\tat java.base/jdk.internal.reflect.DirectMethodHandleAccessor.invoke(Unknown Source)\n\t\tat java.base/java.lang.reflect.Method.invoke(Unknown Source)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$innerContinuation$3(RpcRequestProxy.java:435)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestDapperHookFactory$TracingRequestHook.call(RosyRequestDapperHookFactory.java:88)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestCredsHookFactory$1.call(RosyRequestCredsHookFactory.java:56)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat 
com.google.cloud.helix.common.rosy.RosyRequestConcurrentCallsHookFactory$Hook.call(RosyRequestConcurrentCallsHookFactory.java:101)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestVarzHookFactory$Hook.call(RosyRequestVarzHookFactory.java:464)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestAuditHookFactory$1.call(RosyRequestAuditHookFactory.java:107)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RequestSecurityExtensionForGwsHookFactory$1.call(RequestSecurityExtensionForGwsHookFactory.java:69)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestSecurityContextHookFactory$1.call(RosyRequestSecurityContextHookFactory.java:83)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestContextHookFactory.call(RosyRequestContextHookFactory.java:63)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.oneplatform.common.OnePlatformRosyHookModule$MendelIdLogger$1.call(OnePlatformRosyHookModule.java:123)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.invoke(RpcRequestProxy.java:666)\n\t\tat jdk.proxy2/jdk.proxy2.$Proxy52.insert(Unknown Source)\n\t\tat com.google.cloud.helix.proto.proto2api.HelixJobService$ServiceParameters$1.handleRequest(HelixJobService.java:917)\n\t\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$RpcApplicationHandlerAdaptor.handleRequest(RpcServerInterceptor2Util.java:82)\n\t\tat com.google.net.rpc3.impl.server.AggregatedRpcServerInterceptors.interceptRpc(AggregatedRpcServerInterceptors.java:100)\n\t\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\n\t\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplicationWithCancellation(RpcServerInternalContext.java:693)\n\t\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.lambda$runRpcInApplication$0(RpcServerInternalContext.java:658)\n\t\tat io.grpc.Context.run(Context.java:536)\n\t\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplication(RpcServerInternalContext.java:658)\n\t\tat com.google.net.rpc3.util.RpcInProcessConnector$ServerInternalContext.lambda$runWithExecutor$1(RpcInProcessConnector.java:1995)\n\t\t... 
7 more\nCaused by: com.google.net.rpc3.client.RpcClientException: APPLICATION_ERROR;cloud.helix/Authorization.AuthorizeQuery;domain: \"cloud.helix.ErrorDomain\" code: \"ACCESS_DENIED\" argument: \"Table\" argument: \"lookerdata:cdc.project_tycho_reports\" argument: \"User does not have permission to query table lookerdata:cdc.project_tycho_reports, or perhaps it does not exist.\" debug_info: \"[ACCESS_DENIED] errorProto=code: \\\"ACCESS_DENIED\\\"\\nargument: \\\"Table\\\"\\nargument: \\\"lookerdata:cdc.project_tycho_reports\\\"\\nargument: \\\"User does not have permission to query table lookerdata:cdc.project_tycho_reports, or perhaps it does not exist.\\\"\\n\\n\\tat com.google.cloud.helix.common.Exceptions$Public.accessDenied(Exceptions.java:574)\\n\\tat com.google.cloud.helix.common.Exceptions$Public.accessDenied(Exceptions.java:615)\\n\\tat com.google.cloud.helix.common.Exceptions$Public.accessDenied(Exceptions.java:619)\\n\\tat com.google.cloud.helix.common.UserTableReference.buildAccessDeniedException(UserTableReference.java:79)\\n\\tat com.google.cloud.helix.server.auth.QueryEntityReferenceConverter.authorizeMissingDataset(QueryEntityReferenceConverter.java:257)\\n\\tat com.google.cloud.helix.server.auth.QueryEntityReferenceConverter.normalizeIfNeeded(QueryEntityReferenceConverter.java:206)\\n\\tat com.google.cloud.helix.server.auth.QueryEntityReferenceConverter.convertTable(QueryEntityReferenceConverter.java:117)\\n\\tat com.google.cloud.helix.server.auth.QueryEntityReferenceConverter.convert(QueryEntityReferenceConverter.java:93)\\n\\tat com.google.cloud.helix.server.auth.service.AuthorizationService.lambda$authorizeQuery$1(AuthorizationService.java:444)\\n\\tat com.google.cloud.helix.server.common.TreeNode.transform(TreeNode.java:94)\\n\\tat com.google.cloud.helix.server.common.TreeNode.transform(TreeNode.java:96)\\n\\tat com.google.cloud.helix.server.auth.service.AuthorizationService.lambda$authorizeQuery$0(AuthorizationService.java:444)\\n\\tat com.google.cloud.helix.server.auth.service.AuthorizationService.runWithSecurityContext(AuthorizationService.java:1082)\\n\\tat com.google.cloud.helix.server.auth.service.AuthorizationService.authorizeQuery(AuthorizationService.java:395)\\n\\tat com.google.cloud.helix.proto2.Authorization$ServiceParameters$2.handleRequest(Authorization.java:511)\\n\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$RpcApplicationHandlerAdaptor.handleRequest(RpcServerInterceptor2Util.java:82)\\n\\tat com.google.net.rpc3.impl.server.AggregatedRpcServerInterceptors.interceptRpc(AggregatedRpcServerInterceptors.java:100)\\n\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\n\\tat com.google.prod.fireaxe.filters.FireaxeRpcServerInterceptorImpl.interceptRpc(FireaxeRpcServerInterceptorImpl.java:68)\\n\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\n\\tat com.google.cloud.helix.common.isolation.IsolationServerInterceptor.checkIsolationThrottlingAndContinueHandling(IsolationServerInterceptor.java:162)\\n\\tat com.google.cloud.helix.common.isolation.IsolationServerInterceptor.interceptRpc(IsolationServerInterceptor.java:113)\\n\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\n\\tat 
com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplicationWithCancellation(RpcServerInternalContext.java:693)\\n\\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.lambda$runRpcInApplication$0(RpcServerInternalContext.java:658)\\n\\tat io.grpc.Context.run(Context.java:536)\\n\\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplication(RpcServerInternalContext.java:658)\\n\\tat com.google.net.rpc3.impl.server.RpcServerChannel$4.apply(RpcServerChannel.java:1102)\\n\\tat com.google.net.rpc3.impl.server.RpcServerChannel$4.apply(RpcServerChannel.java:1060)\\n\\tat com.google.common.util.concurrent.AbstractTransformFuture$TransformFuture.doTransform(AbstractTransformFuture.java:259)\\n\\tat com.google.common.util.concurrent.AbstractTransformFuture$TransformFuture.doTransform(AbstractTransformFuture.java:249)\\n\\tat com.google.common.util.concurrent.AbstractTransformFuture.run(AbstractTransformFuture.java:130)\\n\\tat com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\\n\\tat com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:80)\\n\\tat com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\\n\\tat com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\\n\\tat io.grpc.Context.run(Context.java:536)\\n\\tat com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:78)\\n\\tat com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\\n\\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)\\n\\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)\\n\\tat java.base/java.lang.Thread.run(Unknown Source)\\n\\tSuppressed: [NOT_FOUND] message=com.google.net.rpc3.client.RpcClientException: APPLICATION_ERROR;cloud.helix/Storage.GetDataset;domain: \\\"cloud.helix.ErrorDomain\\\" code: \\\"NOT_FOUND\\\" argument: \\\"Dataset\\\" argument: \\\"lookerdata:cdc\\\" debug_info: \\\"[NOT_FOUND] message=Dataset lookerdata:cdc not found debug=time: 2024-10-15T19:04:52.652-07:00 errorProto=code: \\\\\\\"NOT_FOUND\\\\\\\"\\\\nargument: \\\\\\\"Dataset\\\\\\\"\\\\nargument: \\\\\\\"lookerdata:cdc\\\\\\\"\\\\n\\\\n\\\\tat com.google.cloud.helix.common.Exceptions$Public.resourceNotFound(Exceptions.java:292)\\\\n\\\\tat com.google.cloud.helix.common.Exceptions$Public.resourceNotFound(Exceptions.java:296)\\\\n\\\\tat com.google.cloud.helix.server.metadata.DatasetTrackerSpanner.lambda$getDatasetEntityAsync$0(DatasetTrackerSpanner.java:705)\\\\n\\\\tat com.google.common.util.concurrent.AbstractTransformFuture$AsyncTransformFuture.doTransform(AbstractTransformFuture.java:230)\\\\n\\\\tat com.google.common.util.concurrent.AbstractTransformFuture$AsyncTransformFuture.doTransform(AbstractTransformFuture.java:217)\\\\n\\\\tat com.google.common.util.concurrent.AbstractTransformFuture.run(AbstractTransformFuture.java:130)\\\\n\\\\tat com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\\\\n\\\\tat io.grpc.Context.run(Context.java:536)\\\\n\\\\tat com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:78)\\\\n\\\\tat com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\\\\n\\\\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)\\\\n\\\\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)\\\\n\\\\tat 
com.google.cloud.helix.common.ThreadPoolFactory.lambda$defaultThreadFactory$2(ThreadPoolFactory.java:143)\\\\n\\\\tat java.base/java.lang.Thread.run(Unknown Source)\\\\n\\\\tSuppressed: java.lang.Exception: Including call stack from HelixFutures\\\\n\\\\t\\\\tat com.google.cloud.helix.common.HelixFutures.getHelixException(HelixFutures.java:76)\\\\n\\\\t\\\\tat com.google.cloud.helix.common.HelixFutures.get(HelixFutures.java:42)\\\\n\\\\t\\\\tat com.google.cloud.helix.server.metadata.DatasetTracker.lookupDataset(DatasetTracker.java:152)\\\\n\\\\t\\\\tat com.google.cloud.helix.server.rosy.GetDatasetImpl.getDatasetInternal(GetDatasetImpl.java:60)\\\\n\\\\t\\\\tat com.google.cloud.helix.server.rosy.GetDatasetImpl.getDataset(GetDatasetImpl.java:38)\\\\n\\\\t\\\\tat com.google.cloud.helix.server.rosy.StorageStubby.getDataset(StorageStubby.java:504)\\\\n\\\\t\\\\tat com.google.cloud.helix.proto2.Storage$ServiceParameters$81.handleBlockingRequest(Storage.java:4539)\\\\n\\\\t\\\\tat com.google.cloud.helix.proto2.Storage$ServiceParameters$81.handleBlockingRequest(Storage.java:4537)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcBlockingApplicationHandler.handleRequest(RpcBlockingApplicationHandler.java:28)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$RpcApplicationHandlerAdaptor.handleRequest(RpcServerInterceptor2Util.java:82)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.AggregatedRpcServerInterceptors.interceptRpc(AggregatedRpcServerInterceptors.java:100)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\\\n\\\\t\\\\tat com.google.prod.fireaxe.filters.FireaxeRpcServerInterceptorImpl.interceptRpc(FireaxeRpcServerInterceptorImpl.java:68)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\\\n\\\\t\\\\tat com.google.cloud.helix.common.isolation.IsolationServerInterceptor.checkIsolationThrottlingAndContinueHandling(IsolationServerInterceptor.java:162)\\\\n\\\\t\\\\tat com.google.cloud.helix.common.isolation.IsolationServerInterceptor.interceptRpc(IsolationServerInterceptor.java:113)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplicationWithCancellation(RpcServerInternalContext.java:693)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.lambda$runRpcInApplication$0(RpcServerInternalContext.java:658)\\\\n\\\\t\\\\tat io.grpc.Context.run(Context.java:536)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplication(RpcServerInternalContext.java:658)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerChannel$4.apply(RpcServerChannel.java:1102)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerChannel$4.apply(RpcServerChannel.java:1060)\\\\n\\\\t\\\\tat com.google.common.util.concurrent.AbstractTransformFuture$TransformFuture.doTransform(AbstractTransformFuture.java:259)\\\\n\\\\t\\\\tat com.google.common.util.concurrent.AbstractTransformFuture$TransformFuture.doTransform(AbstractTransformFuture.java:249)\\\\n\\\\t\\\\tat com.google.common.util.concurrent.AbstractTransformFuture.run(AbstractTransformFuture.java:130)\\\\n\\\\t\\\\tat 
com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\\\\n\\\\t\\\\tat com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:80)\\\\n\\\\t\\\\tat com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\\\\n\\\\t\\\\tat com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\\\\n\\\\t\\\\tat io.grpc.Context.run(Context.java:536)\\\\n\\\\t\\\\tat com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:78)\\\\n\\\\t\\\\tat com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\\\\n\\\\t\\\\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)\\\\n\\\\t\\\\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)\\\\n\\\\t\\\\t... 1 more\\\\n\\\";AppErrorCode=2;StartTimeMs=1729044292646;unknown;ResFormat=uncompressed;ServerTimeSec=0.006606848;LogBytes=256;Non-FailFast;EffSecLevel=none;ReqFormat=uncompressed;ReqID=e282aacfbcda1ddc;GlobalID=0;Server=[2002:a05:6845:9603:b0:3d:63a8:3b9e]:4001 debug=time: 2024-10-15T19:04:52.652-07:00 errorProto=code: \\\"NOT_FOUND\\\"\\nargument: \\\"Dataset\\\"\\nargument: \\\"lookerdata:cdc\\\"\\n\\n\\t\\tat com.google.cloud.helix.server.common.StubbyUtil.createHelixException(StubbyUtil.java:229)\\n\\t\\tat com.google.cloud.helix.server.metadata.lookup.RpcDatasetMetadataLookup.resolveDataset(RpcDatasetMetadataLookup.java:82)\\n\\t\\tat com.google.cloud.helix.server.auth.QueryEntityReferenceConverter.normalizeIfNeeded(QueryEntityReferenceConverter.java:201)\\n\\t\\t... 36 more\\n\\tCaused by: com.google.net.rpc3.client.RpcClientException: APPLICATION_ERROR;cloud.helix/Storage.GetDataset;domain: \\\"cloud.helix.ErrorDomain\\\" code: \\\"NOT_FOUND\\\" argument: \\\"Dataset\\\" argument: \\\"lookerdata:cdc\\\" debug_info: \\\"[NOT_FOUND] message=Dataset lookerdata:cdc not found debug=time: 2024-10-15T19:04:52.652-07:00 errorProto=code: \\\\\\\"NOT_FOUND\\\\\\\"\\\\nargument: \\\\\\\"Dataset\\\\\\\"\\\\nargument: \\\\\\\"lookerdata:cdc\\\\\\\"\\\\n\\\\n\\\\tat com.google.cloud.helix.common.Exceptions$Public.resourceNotFound(Exceptions.java:292)\\\\n\\\\tat com.google.cloud.helix.common.Exceptions$Public.resourceNotFound(Exceptions.java:296)\\\\n\\\\tat com.google.cloud.helix.server.metadata.DatasetTrackerSpanner.lambda$getDatasetEntityAsync$0(DatasetTrackerSpanner.java:705)\\\\n\\\\tat com.google.common.util.concurrent.AbstractTransformFuture$AsyncTransformFuture.doTransform(AbstractTransformFuture.java:230)\\\\n\\\\tat com.google.common.util.concurrent.AbstractTransformFuture$AsyncTransformFuture.doTransform(AbstractTransformFuture.java:217)\\\\n\\\\tat com.google.common.util.concurrent.AbstractTransformFuture.run(AbstractTransformFuture.java:130)\\\\n\\\\tat com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\\\\n\\\\tat io.grpc.Context.run(Context.java:536)\\\\n\\\\tat com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:78)\\\\n\\\\tat com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\\\\n\\\\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)\\\\n\\\\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)\\\\n\\\\tat com.google.cloud.helix.common.ThreadPoolFactory.lambda$defaultThreadFactory$2(ThreadPoolFactory.java:143)\\\\n\\\\tat java.base/java.lang.Thread.run(Unknown Source)\\\\n\\\\tSuppressed: java.lang.Exception: 
Including call stack from HelixFutures\\\\n\\\\t\\\\tat com.google.cloud.helix.common.HelixFutures.getHelixException(HelixFutures.java:76)\\\\n\\\\t\\\\tat com.google.cloud.helix.common.HelixFutures.get(HelixFutures.java:42)\\\\n\\\\t\\\\tat com.google.cloud.helix.server.metadata.DatasetTracker.lookupDataset(DatasetTracker.java:152)\\\\n\\\\t\\\\tat com.google.cloud.helix.server.rosy.GetDatasetImpl.getDatasetInternal(GetDatasetImpl.java:60)\\\\n\\\\t\\\\tat com.google.cloud.helix.server.rosy.GetDatasetImpl.getDataset(GetDatasetImpl.java:38)\\\\n\\\\t\\\\tat com.google.cloud.helix.server.rosy.StorageStubby.getDataset(StorageStubby.java:504)\\\\n\\\\t\\\\tat com.google.cloud.helix.proto2.Storage$ServiceParameters$81.handleBlockingRequest(Storage.java:4539)\\\\n\\\\t\\\\tat com.google.cloud.helix.proto2.Storage$ServiceParameters$81.handleBlockingRequest(Storage.java:4537)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcBlockingApplicationHandler.handleRequest(RpcBlockingApplicationHandler.java:28)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$RpcApplicationHandlerAdaptor.handleRequest(RpcServerInterceptor2Util.java:82)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.AggregatedRpcServerInterceptors.interceptRpc(AggregatedRpcServerInterceptors.java:100)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\\\n\\\\t\\\\tat com.google.prod.fireaxe.filters.FireaxeRpcServerInterceptorImpl.interceptRpc(FireaxeRpcServerInterceptorImpl.java:68)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\\\n\\\\t\\\\tat com.google.cloud.helix.common.isolation.IsolationServerInterceptor.checkIsolationThrottlingAndContinueHandling(IsolationServerInterceptor.java:162)\\\\n\\\\t\\\\tat com.google.cloud.helix.common.isolation.IsolationServerInterceptor.interceptRpc(IsolationServerInterceptor.java:113)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplicationWithCancellation(RpcServerInternalContext.java:693)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.lambda$runRpcInApplication$0(RpcServerInternalContext.java:658)\\\\n\\\\t\\\\tat io.grpc.Context.run(Context.java:536)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplication(RpcServerInternalContext.java:658)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerChannel$4.apply(RpcServerChannel.java:1102)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerChannel$4.apply(RpcServerChannel.java:1060)\\\\n\\\\t\\\\tat com.google.common.util.concurrent.AbstractTransformFuture$TransformFuture.doTransform(AbstractTransformFuture.java:259)\\\\n\\\\t\\\\tat com.google.common.util.concurrent.AbstractTransformFuture$TransformFuture.doTransform(AbstractTransformFuture.java:249)\\\\n\\\\t\\\\tat com.google.common.util.concurrent.AbstractTransformFuture.run(AbstractTransformFuture.java:130)\\\\n\\\\t\\\\tat com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\\\\n\\\\t\\\\tat com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:80)\\\\n\\\\t\\\\tat 
com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\\\\n\\\\t\\\\tat com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\\\\n\\\\t\\\\tat io.grpc.Context.run(Context.java:536)\\\\n\\\\t\\\\tat com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:78)\\\\n\\\\t\\\\tat com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\\\\n\\\\t\\\\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)\\\\n\\\\t\\\\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)\\\\n\\\\t\\\\t... 1 more\\\\n\\\";AppErrorCode=2;StartTimeMs=1729044292646;unknown;ResFormat=uncompressed;ServerTimeSec=0.006606848;LogBytes=256;Non-FailFast;EffSecLevel=none;ReqFormat=uncompressed;ReqID=e282aacfbcda1ddc;GlobalID=0;Server=[2002:a05:6845:9603:b0:3d:63a8:3b9e]:4001\\n\\t\\tat com.google.net.rpc3.client.RpcStub.startBlockingRpcInternal(RpcStub.java:571)\\n\\t\\tat com.google.net.rpc3.client.RpcStub.startBlockingRpc(RpcStub.java:471)\\n\\t\\tat com.google.cloud.helix.proto2.Storage$Stub.getDataset(Storage.java:1349)\\n\\t\\tat com.google.cloud.helix.server.metadata.lookup.RpcDatasetMetadataLookup.resolveDataset(RpcDatasetMetadataLookup.java:55)\\n\\t\\t... 37 more\\n\\tSuppressed: [ACCESS_DENIED] debug=User 768386550392: IAM access denied errorProto=code: \\\"ACCESS_DENIED\\\"\\nargument: \\\"Dataset\\\"\\nargument: \\\"lookerdata:cdc\\\"\\nargument: \\\"Permission bigquery.datasets.get denied on dataset lookerdata:cdc (or it may not exist).\\\"\\n\\n\\t\\tat com.google.cloud.helix.common.Exceptions$Public.accessDenied(Exceptions.java:574)\\n\\t\\tat com.google.cloud.helix.common.auth.AuthorizerExceptions$Public.createPermissionDeny(AuthorizerExceptions.java:262)\\n\\t\\tat com.google.cloud.helix.common.auth.AuthorizerExceptions$Public.datasetAccessDenied(AuthorizerExceptions.java:156)\\n\\t\\tat com.google.cloud.helix.server.auth.IamAuthorizer.lambda$authorizeDatasetInternal$4(IamAuthorizer.java:1194)\\n\\t\\tat com.google.common.util.concurrent.AbstractTransformFuture$TransformFuture.doTransform(AbstractTransformFuture.java:259)\\n\\t\\tat com.google.common.util.concurrent.AbstractTransformFuture$TransformFuture.doTransform(AbstractTransformFuture.java:249)\\n\\t\\tat com.google.common.util.concurrent.AbstractTransformFuture.run(AbstractTransformFuture.java:130)\\n\\t\\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Unknown Source)\\n\\t\\tat com.google.common.util.concurrent.TrustedListenableFutureTask$TrustedFutureInterruptibleTask.runInterruptibly(TrustedListenableFutureTask.java:135)\\n\\t\\tat com.google.common.util.concurrent.InterruptibleTask.run(InterruptibleTask.java:77)\\n\\t\\tat com.google.common.util.concurrent.TrustedListenableFutureTask.run(TrustedListenableFutureTask.java:86)\\n\\t\\t... 
7 more\\n\";AppErrorCode=4;StartTimeMs=1729044292643;unknown;Deadline(sec)=60.0;ResFormat=uncompressed;ServerTimeSec=0.174171421;LogBytes=256;FailFast;EffSecLevel=privacy_and_integrity;ReqFormat=uncompressed;ReqID=1d5a3c9d34b06964;GlobalID=0;Server=[2002:a05:6845:6018:b0:3d:366e:f847]:4001\n\tat com.google.net.rpc3.client.RpcStub.startBlockingRpcInternal(RpcStub.java:571)\n\tat com.google.net.rpc3.client.RpcStub.startBlockingRpc(RpcStub.java:471)\n\tat com.google.cloud.helix.proto2.Authorization$Stub.authorizeQuery(Authorization.java:198)\n\tat com.google.cloud.helix.common.auth.client.AuthorizationServiceClientImpl.authorizeQuery(AuthorizationServiceClientImpl.java:332)\n\t... 16 more\n", + "message": "Access Denied: Table lookerdata:cdc.project_tycho_reports: User does not have permission to query table lookerdata:cdc.project_tycho_reports, or perhaps it does not exist." + }, + "errors": [ + { + "reason": "accessDenied", + "message": "Access Denied: Table lookerdata:cdc.project_tycho_reports: User does not have permission to query table lookerdata:cdc.project_tycho_reports, or perhaps it does not exist." + } + ], + "state": "DONE" + }, + "principal_subject": "user:xiaoweim@google.com", + "jobCreationReason": { + "code": "REQUESTED" + } + } + headers: + Content-Type: + - application/json; charset=UTF-8 + status: 200 OK + code: 200 + duration: 249.474456ms + - id: 14 + request: + proto: HTTP/1.1 + proto_major: 1 + proto_minor: 1 + content_length: 0 + transfer_encoding: [] + trailer: {} + host: bigquery.googleapis.com + remote_addr: "" + request_uri: "" + body: "" + form: {} + headers: + X-Goog-Api-Client: + - gl-go/1.23.0--20240626-RC01 cl/646990413 +5a18e79687 X:fieldtrack,boringcrypto gdcl/0.196.0 + url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset22yq2ldf3wcoir/tables/bigquerytable2yq2ldf3wcoir?alt=json&prettyPrint=false + method: GET + response: + proto: HTTP/2.0 + proto_major: 2 + proto_minor: 0 + transfer_encoding: [] + trailer: {} + content_length: -1 + uncompressed: true + body: '{"kind":"bigquery#table","etag":"zP4N7TpLpSchHSlPBuMVew==","id":"example-project:bigquerydataset22yq2ldf3wcoir.bigquerytable2yq2ldf3wcoir","selfLink":"https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset22yq2ldf3wcoir/tables/bigquerytable2yq2ldf3wcoir","tableReference":{"projectId":"example-project","datasetId":"bigquerydataset22yq2ldf3wcoir","tableId":"bigquerytable2yq2ldf3wcoir"},"labels":{"cnrm-test":"true","managed-by-cnrm":"true"},"numBytes":"0","numLongTermBytes":"0","numRows":"0","creationTime":"1729233765250","lastModifiedTime":"1729233765330","type":"TABLE","location":"us-central1","numTotalLogicalBytes":"0","numActiveLogicalBytes":"0","numLongTermLogicalBytes":"0"}' + headers: + Content-Type: + - application/json; charset=UTF-8 + status: 200 OK + code: 200 + duration: 114.392789ms + - id: 15 + request: + proto: HTTP/1.1 + proto_major: 1 + proto_minor: 1 + content_length: 0 + transfer_encoding: [] + trailer: {} + host: bigquery.googleapis.com + remote_addr: "" + request_uri: "" + body: "" + form: {} + headers: + Content-Type: + - application/json + url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset12yq2ldf3wcoir?alt=json + method: GET + response: + proto: HTTP/2.0 + proto_major: 2 + proto_minor: 0 + transfer_encoding: [] + trailer: {} + content_length: -1 + uncompressed: true + body: | + { + "kind": "bigquery#dataset", + "etag": "JfHG5KJ6x0vqD5TJAHK4ag==", + 
"id": "example-project:bigquerydataset12yq2ldf3wcoir", + "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset12yq2ldf3wcoir", + "datasetReference": { "datasetId": "bigquerydataset12yq2ldf3wcoir", "projectId": "example-project" }, - "priority": "INTERACTIVE", - "allowLargeResults": true, - "useQueryCache": true, - "flattenResults": true, - "useLegacySql": true, - "schemaUpdateOptions": [ - "ALLOW_FIELD_ADDITION", - "ALLOW_FIELD_RELAXATION" - ], - "scriptOptions": { - "statementTimeoutMs": "300000", - "keyResultStatement": "LAST" - } - }, - "jobTimeoutMs": "600000", - "labels": { - "cnrm-test": "true", - "label-one": "value-one", - "managed-by-cnrm": "true" - }, - "jobType": "QUERY" - }, - "jobReference": { - "projectId": "example-project", - "jobId": "bigqueryjob-2yq2ldf3wcoir", - "location": "US" - }, - "statistics": { - "creationTime": "1714007925436", - "startTime": "1714007926032", - "query": { - "queryPlan": [ + "labels": { + "cnrm-test": "true", + "managed-by-cnrm": "true" + }, + "access": [ { - "name": "S00: Output", - "id": "0", - "startMs": "1714007926424", - "waitMsAvg": "0", - "waitMsMax": "0", - "readMsAvg": "0", - "readMsMax": "0", - "computeMsAvg": "0", - "computeMsMax": "0", - "writeMsAvg": "0", - "writeMsMax": "0", - "shuffleOutputBytes": "0", - "shuffleOutputBytesSpilled": "0", - "recordsRead": "0", - "recordsWritten": "0", - "parallelInputs": "1", - "completedParallelInputs": "0", - "status": "RUNNING", - "steps": [ - { - "kind": "READ", - "substeps": [ - "state", - "FROM lookerdata:cdc.project_tycho_reports AS lookerdata:cdc.project_tycho_reports" - ] - }, - { - "kind": "WRITE", - "substeps": [ - "state", - "TO __stage00_output" - ] - } - ], - "slotMs": "0", - "computeMode": "BIGQUERY" - } - ], - "estimatedBytesProcessed": "3037868", - "timeline": [ + "role": "WRITER", + "specialGroup": "projectWriters" + }, + { + "role": "OWNER", + "specialGroup": "projectOwners" + }, + { + "role": "OWNER", + "userByEmail": "xiaoweim@google.com" + }, { - "elapsedMs": "892", - "totalSlotMs": "107", - "pendingUnits": "1", - "completedUnits": "0", - "activeUnits": "1", - "estimatedRunnableUnits": "0" + "role": "READER", + "specialGroup": "projectReaders" } ], - "totalSlotMs": "107", - "statementType": "SELECT" - }, - "totalSlotMs": "107" - }, - "status": { - "state": "RUNNING" - }, - "principal_subject": "serviceAccount:integration-test@example-project.iam.gserviceaccount.com", - "jobCreationReason": { - "code": "REQUESTED" - } - } - headers: - Content-Type: - - application/json; charset=UTF-8 - status: 200 OK - code: 200 - duration: 157.11189ms - - id: 18 - request: - proto: HTTP/1.1 - proto_major: 1 - proto_minor: 1 - content_length: 0 - transfer_encoding: [] - trailer: {} - host: bigquery.googleapis.com - remote_addr: "" - request_uri: "" - body: "" - form: {} - headers: - Content-Type: - - application/json - url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset22yq2ldf3wcoir?alt=json - method: GET - response: - proto: HTTP/2.0 - proto_major: 2 - proto_minor: 0 - transfer_encoding: [] - trailer: {} - content_length: -1 - uncompressed: true - body: | - { - "kind": "bigquery#dataset", - "etag": "5BIDIpkSuP2XjG2TO/A+pA==", - "id": "example-project:bigquerydataset22yq2ldf3wcoir", - "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset22yq2ldf3wcoir", - "datasetReference": { - "datasetId": "bigquerydataset22yq2ldf3wcoir", - "projectId": 
"example-project" - }, - "labels": { - "cnrm-test": "true", - "managed-by-cnrm": "true" - }, - "access": [ - { - "role": "WRITER", - "specialGroup": "projectWriters" - }, - { - "role": "OWNER", - "specialGroup": "projectOwners" - }, - { - "role": "OWNER", - "userByEmail": "integration-test@example-project.iam.gserviceaccount.com" - }, - { - "role": "READER", - "specialGroup": "projectReaders" + "creationTime": "1729233764153", + "lastModifiedTime": "1729233764153", + "location": "us-central1", + "type": "DEFAULT", + "maxTimeTravelHours": "168" } - ], - "creationTime": "1714007921933", - "lastModifiedTime": "1714007921933", - "location": "US", - "type": "DEFAULT" - } - headers: - Content-Type: - - application/json; charset=UTF-8 - status: 200 OK - code: 200 - duration: 226.627284ms - - id: 19 - request: - proto: HTTP/1.1 - proto_major: 1 - proto_minor: 1 - content_length: 0 - transfer_encoding: [] - trailer: {} - host: bigquery.googleapis.com - remote_addr: "" - request_uri: "" - body: "" - form: {} - headers: - Content-Type: - - application/json - url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset12yq2ldf3wcoir?alt=json - method: GET - response: - proto: HTTP/2.0 - proto_major: 2 - proto_minor: 0 - transfer_encoding: [] - trailer: {} - content_length: -1 - uncompressed: true - body: | - { - "kind": "bigquery#dataset", - "etag": "1oxb+cE169ziQbocLgYz5w==", - "id": "example-project:bigquerydataset12yq2ldf3wcoir", - "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset12yq2ldf3wcoir", - "datasetReference": { - "datasetId": "bigquerydataset12yq2ldf3wcoir", - "projectId": "example-project" - }, - "labels": { - "cnrm-test": "true", - "managed-by-cnrm": "true" - }, - "access": [ - { - "role": "WRITER", - "specialGroup": "projectWriters" - }, + headers: + Content-Type: + - application/json; charset=UTF-8 + status: 200 OK + code: 200 + duration: 164.637625ms + - id: 16 + request: + proto: HTTP/1.1 + proto_major: 1 + proto_minor: 1 + content_length: 0 + transfer_encoding: [] + trailer: {} + host: bigquery.googleapis.com + remote_addr: "" + request_uri: "" + body: "" + form: {} + headers: + Content-Type: + - application/json + url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset22yq2ldf3wcoir?alt=json + method: GET + response: + proto: HTTP/2.0 + proto_major: 2 + proto_minor: 0 + transfer_encoding: [] + trailer: {} + content_length: -1 + uncompressed: true + body: | { - "role": "OWNER", - "specialGroup": "projectOwners" - }, - { - "role": "OWNER", - "userByEmail": "integration-test@example-project.iam.gserviceaccount.com" - }, - { - "role": "READER", - "specialGroup": "projectReaders" + "kind": "bigquery#dataset", + "etag": "UTD7bH0f//NO3dLHXzmVTQ==", + "id": "example-project:bigquerydataset22yq2ldf3wcoir", + "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset22yq2ldf3wcoir", + "datasetReference": { + "datasetId": "bigquerydataset22yq2ldf3wcoir", + "projectId": "example-project" + }, + "labels": { + "cnrm-test": "true", + "managed-by-cnrm": "true" + }, + "access": [ + { + "role": "WRITER", + "specialGroup": "projectWriters" + }, + { + "role": "OWNER", + "specialGroup": "projectOwners" + }, + { + "role": "OWNER", + "userByEmail": "xiaoweim@google.com" + }, + { + "role": "READER", + "specialGroup": "projectReaders" + } + ], + "creationTime": "1729233764136", + "lastModifiedTime": "1729233764136", + 
"location": "us-central1", + "type": "DEFAULT", + "maxTimeTravelHours": "168" } - ], - "creationTime": "1714007921736", - "lastModifiedTime": "1714007921736", - "location": "US", - "type": "DEFAULT" - } - headers: - Content-Type: - - application/json; charset=UTF-8 - status: 200 OK - code: 200 - duration: 227.96517ms - - id: 20 - request: - proto: HTTP/1.1 - proto_major: 1 - proto_minor: 1 - content_length: 0 - transfer_encoding: [] - trailer: {} - host: bigquery.googleapis.com - remote_addr: "" - request_uri: "" - body: "" - form: {} - headers: - X-Goog-Api-Client: - - gl-go/1.21.5 gdcl/0.160.0 - url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset22yq2ldf3wcoir/tables/bigquerytable2yq2ldf3wcoir?alt=json&prettyPrint=false - method: GET - response: - proto: HTTP/2.0 - proto_major: 2 - proto_minor: 0 - transfer_encoding: [] - trailer: {} - content_length: -1 - uncompressed: true - body: '{"kind":"bigquery#table","etag":"5tjsrJs4nIXqBqyB5xL59w==","id":"example-project:bigquerydataset22yq2ldf3wcoir.bigquerytable2yq2ldf3wcoir","selfLink":"https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset22yq2ldf3wcoir/tables/bigquerytable2yq2ldf3wcoir","tableReference":{"projectId":"example-project","datasetId":"bigquerydataset22yq2ldf3wcoir","tableId":"bigquerytable2yq2ldf3wcoir"},"labels":{"cnrm-test":"true","managed-by-cnrm":"true"},"numBytes":"0","numLongTermBytes":"0","numRows":"0","creationTime":"1714007923844","lastModifiedTime":"1714007923961","type":"TABLE","location":"US","numTotalLogicalBytes":"0","numActiveLogicalBytes":"0","numLongTermLogicalBytes":"0"}' - headers: - Content-Type: - - application/json; charset=UTF-8 - status: 200 OK - code: 200 - duration: 318.668537ms - - id: 21 - request: - proto: HTTP/1.1 - proto_major: 1 - proto_minor: 1 - content_length: 0 - transfer_encoding: [] - trailer: {} - host: bigquery.googleapis.com - remote_addr: "" - request_uri: "" - body: "" - form: {} - headers: - Content-Type: - - application/json - url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset22yq2ldf3wcoir?alt=json&deleteContents=false - method: DELETE - response: - proto: HTTP/2.0 - proto_major: 2 - proto_minor: 0 - transfer_encoding: [] - trailer: {} - content_length: 0 - uncompressed: true - body: fake error message - headers: - Content-Type: - - application/json; charset=UTF-8 - status: 400 Bad Request - code: 400 - duration: 188.401707ms - - id: 22 - request: - proto: HTTP/1.1 - proto_major: 1 - proto_minor: 1 - content_length: 0 - transfer_encoding: [] - trailer: {} - host: bigquery.googleapis.com - remote_addr: "" - request_uri: "" - body: "" - form: {} - headers: - Content-Type: - - application/json - url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset12yq2ldf3wcoir?alt=json&deleteContents=false - method: DELETE - response: - proto: HTTP/2.0 - proto_major: 2 - proto_minor: 0 - transfer_encoding: [] - trailer: {} - content_length: 0 - uncompressed: false - body: "" - headers: - Content-Length: - - "0" - Content-Type: - - application/json; charset=UTF-8 - status: 204 No Content - code: 204 - duration: 246.780652ms - - id: 23 - request: - proto: HTTP/1.1 - proto_major: 1 - proto_minor: 1 - content_length: 0 - transfer_encoding: [] - trailer: {} - host: bigquery.googleapis.com - remote_addr: "" - request_uri: "" - body: "" - form: {} - headers: - X-Goog-Api-Client: - - gl-go/1.21.5 gdcl/0.160.0 - url: 
https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset22yq2ldf3wcoir/tables/bigquerytable2yq2ldf3wcoir?alt=json&prettyPrint=false - method: DELETE - response: - proto: HTTP/2.0 - proto_major: 2 - proto_minor: 0 - transfer_encoding: [] - trailer: {} - content_length: 0 - uncompressed: false - body: "" - headers: - Content-Length: - - "0" - Content-Type: - - application/json; charset=UTF-8 - status: 204 No Content - code: 204 - duration: 250.765634ms - - id: 24 - request: - proto: HTTP/1.1 - proto_major: 1 - proto_minor: 1 - content_length: 0 - transfer_encoding: [] - trailer: {} - host: bigquery.googleapis.com - remote_addr: "" - request_uri: "" - body: "" - form: {} - headers: - Content-Type: - - application/json - url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset22yq2ldf3wcoir?alt=json - method: GET - response: - proto: HTTP/2.0 - proto_major: 2 - proto_minor: 0 - transfer_encoding: [] - trailer: {} - content_length: -1 - uncompressed: true - body: | - { - "kind": "bigquery#dataset", - "etag": "5BIDIpkSuP2XjG2TO/A+pA==", - "id": "example-project:bigquerydataset22yq2ldf3wcoir", - "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset22yq2ldf3wcoir", - "datasetReference": { - "datasetId": "bigquerydataset22yq2ldf3wcoir", - "projectId": "example-project" - }, - "labels": { - "cnrm-test": "true", - "managed-by-cnrm": "true" - }, - "access": [ - { - "role": "WRITER", - "specialGroup": "projectWriters" - }, + headers: + Content-Type: + - application/json; charset=UTF-8 + status: 200 OK + code: 200 + duration: 169.425364ms + - id: 17 + request: + proto: HTTP/1.1 + proto_major: 1 + proto_minor: 1 + content_length: 0 + transfer_encoding: [] + trailer: {} + host: bigquery.googleapis.com + remote_addr: "" + request_uri: "" + body: "" + form: {} + headers: + Content-Type: + - application/json + url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/jobs/bigqueryjob-2yq2ldf3wcoir?alt=json&location=us-central1 + method: GET + response: + proto: HTTP/2.0 + proto_major: 2 + proto_minor: 0 + transfer_encoding: [] + trailer: {} + content_length: -1 + uncompressed: true + body: | { - "role": "OWNER", - "specialGroup": "projectOwners" - }, - { - "role": "OWNER", - "userByEmail": "integration-test@example-project.iam.gserviceaccount.com" - }, - { - "role": "READER", - "specialGroup": "projectReaders" + "kind": "bigquery#job", + "etag": "qywbpQBNxG6Twvxjt/luNw==", + "id": "example-project:us-central1.bigqueryjob-2yq2ldf3wcoir", + "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/example-project/jobs/bigqueryjob-2yq2ldf3wcoir?location=us-central1", + "user_email": "xiaoweim@google.com", + "configuration": { + "query": { + "query": "SELECT state FROM [lookerdata:cdc.project_tycho_reports]", + "destinationTable": { + "projectId": "example-project", + "datasetId": "bigquerydataset22yq2ldf3wcoir", + "tableId": "bigquerytable2yq2ldf3wcoir" + }, + "createDisposition": "CREATE_NEVER", + "writeDisposition": "WRITE_APPEND", + "defaultDataset": { + "datasetId": "bigquerydataset12yq2ldf3wcoir", + "projectId": "example-project" + }, + "priority": "INTERACTIVE", + "allowLargeResults": true, + "useQueryCache": true, + "flattenResults": true, + "useLegacySql": true, + "schemaUpdateOptions": [ + "ALLOW_FIELD_ADDITION", + "ALLOW_FIELD_RELAXATION" + ], + "scriptOptions": { + "statementTimeoutMs": "300000", + "keyResultStatement": "LAST" + } + }, + 
"jobTimeoutMs": "600000", + "labels": { + "cnrm-test": "true", + "label-one": "value-one", + "managed-by-cnrm": "true" + }, + "jobType": "QUERY" + }, + "jobReference": { + "projectId": "example-project", + "jobId": "bigqueryjob-2yq2ldf3wcoir", + "location": "us-central1" + }, + "statistics": { + "creationTime": "1729044292505", + "startTime": "1729044292825", + "endTime": "1729044292825" + }, + "status": { + "errorResult": { + "reason": "accessDenied", + "debugInfo": "[ACCESS_DENIED] message=com.google.net.rpc3.client.RpcClientException: APPLICATION_ERROR;cloud.helix/Authorization.AuthorizeQuery;domain: \"cloud.helix.ErrorDomain\" code: \"ACCESS_DENIED\" argument: \"Table\" argument: \"lookerdata:cdc.project_tycho_reports\" argument: \"User does not have permission to query table lookerdata:cdc.project_tycho_reports, or perhaps it does not exist.\" debug_info: \"[ACCESS_DENIED] errorProto=code: \\\"ACCESS_DENIED\\\"\\nargument: \\\"Table\\\"\\nargument: \\\"lookerdata:cdc.project_tycho_reports\\\"\\nargument: \\\"User does not have permission to query table lookerdata:cdc.project_tycho_reports, or perhaps it does not exist.\\\"\\n\\n\\tat com.google.cloud.helix.common.Exceptions$Public.accessDenied(Exceptions.java:574)\\n\\tat com.google.cloud.helix.common.Exceptions$Public.accessDenied(Exceptions.java:615)\\n\\tat com.google.cloud.helix.common.Exceptions$Public.accessDenied(Exceptions.java:619)\\n\\tat com.google.cloud.helix.common.UserTableReference.buildAccessDeniedException(UserTableReference.java:79)\\n\\tat com.google.cloud.helix.server.auth.QueryEntityReferenceConverter.authorizeMissingDataset(QueryEntityReferenceConverter.java:257)\\n\\tat com.google.cloud.helix.server.auth.QueryEntityReferenceConverter.normalizeIfNeeded(QueryEntityReferenceConverter.java:206)\\n\\tat com.google.cloud.helix.server.auth.QueryEntityReferenceConverter.convertTable(QueryEntityReferenceConverter.java:117)\\n\\tat com.google.cloud.helix.server.auth.QueryEntityReferenceConverter.convert(QueryEntityReferenceConverter.java:93)\\n\\tat com.google.cloud.helix.server.auth.service.AuthorizationService.lambda$authorizeQuery$1(AuthorizationService.java:444)\\n\\tat com.google.cloud.helix.server.common.TreeNode.transform(TreeNode.java:94)\\n\\tat com.google.cloud.helix.server.common.TreeNode.transform(TreeNode.java:96)\\n\\tat com.google.cloud.helix.server.auth.service.AuthorizationService.lambda$authorizeQuery$0(AuthorizationService.java:444)\\n\\tat com.google.cloud.helix.server.auth.service.AuthorizationService.runWithSecurityContext(AuthorizationService.java:1082)\\n\\tat com.google.cloud.helix.server.auth.service.AuthorizationService.authorizeQuery(AuthorizationService.java:395)\\n\\tat com.google.cloud.helix.proto2.Authorization$ServiceParameters$2.handleRequest(Authorization.java:511)\\n\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$RpcApplicationHandlerAdaptor.handleRequest(RpcServerInterceptor2Util.java:82)\\n\\tat com.google.net.rpc3.impl.server.AggregatedRpcServerInterceptors.interceptRpc(AggregatedRpcServerInterceptors.java:100)\\n\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\n\\tat com.google.prod.fireaxe.filters.FireaxeRpcServerInterceptorImpl.interceptRpc(FireaxeRpcServerInterceptorImpl.java:68)\\n\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\n\\tat 
com.google.cloud.helix.common.isolation.IsolationServerInterceptor.checkIsolationThrottlingAndContinueHandling(IsolationServerInterceptor.java:162)\\n\\tat com.google.cloud.helix.common.isolation.IsolationServerInterceptor.interceptRpc(IsolationServerInterceptor.java:113)\\n\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\n\\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplicationWithCancellation(RpcServerInternalContext.java:693)\\n\\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.lambda$runRpcInApplication$0(RpcServerInternalContext.java:658)\\n\\tat io.grpc.Context.run(Context.java:536)\\n\\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplication(RpcServerInternalContext.java:658)\\n\\tat com.google.net.rpc3.impl.server.RpcServerChannel$4.apply(RpcServerChannel.java:1102)\\n\\tat com.google.net.rpc3.impl.server.RpcServerChannel$4.apply(RpcServerChannel.java:1060)\\n\\tat com.google.common.util.concurrent.AbstractTransformFuture$TransformFuture.doTransform(AbstractTransformFuture.java:259)\\n\\tat com.google.common.util.concurrent.AbstractTransformFuture$TransformFuture.doTransform(AbstractTransformFuture.java:249)\\n\\tat com.google.common.util.concurrent.AbstractTransformFuture.run(AbstractTransformFuture.java:130)\\n\\tat com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\\n\\tat com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:80)\\n\\tat com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\\n\\tat com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\\n\\tat io.grpc.Context.run(Context.java:536)\\n\\tat com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:78)\\n\\tat com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\\n\\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)\\n\\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)\\n\\tat java.base/java.lang.Thread.run(Unknown Source)\\n\\tSuppressed: [NOT_FOUND] message=com.google.net.rpc3.client.RpcClientException: APPLICATION_ERROR;cloud.helix/Storage.GetDataset;domain: \\\"cloud.helix.ErrorDomain\\\" code: \\\"NOT_FOUND\\\" argument: \\\"Dataset\\\" argument: \\\"lookerdata:cdc\\\" debug_info: \\\"[NOT_FOUND] message=Dataset lookerdata:cdc not found debug=time: 2024-10-15T19:04:52.652-07:00 errorProto=code: \\\\\\\"NOT_FOUND\\\\\\\"\\\\nargument: \\\\\\\"Dataset\\\\\\\"\\\\nargument: \\\\\\\"lookerdata:cdc\\\\\\\"\\\\n\\\\n\\\\tat com.google.cloud.helix.common.Exceptions$Public.resourceNotFound(Exceptions.java:292)\\\\n\\\\tat com.google.cloud.helix.common.Exceptions$Public.resourceNotFound(Exceptions.java:296)\\\\n\\\\tat com.google.cloud.helix.server.metadata.DatasetTrackerSpanner.lambda$getDatasetEntityAsync$0(DatasetTrackerSpanner.java:705)\\\\n\\\\tat com.google.common.util.concurrent.AbstractTransformFuture$AsyncTransformFuture.doTransform(AbstractTransformFuture.java:230)\\\\n\\\\tat com.google.common.util.concurrent.AbstractTransformFuture$AsyncTransformFuture.doTransform(AbstractTransformFuture.java:217)\\\\n\\\\tat com.google.common.util.concurrent.AbstractTransformFuture.run(AbstractTransformFuture.java:130)\\\\n\\\\tat com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\\\\n\\\\tat 
io.grpc.Context.run(Context.java:536)\\\\n\\\\tat com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:78)\\\\n\\\\tat com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\\\\n\\\\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)\\\\n\\\\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)\\\\n\\\\tat com.google.cloud.helix.common.ThreadPoolFactory.lambda$defaultThreadFactory$2(ThreadPoolFactory.java:143)\\\\n\\\\tat java.base/java.lang.Thread.run(Unknown Source)\\\\n\\\\tSuppressed: java.lang.Exception: Including call stack from HelixFutures\\\\n\\\\t\\\\tat com.google.cloud.helix.common.HelixFutures.getHelixException(HelixFutures.java:76)\\\\n\\\\t\\\\tat com.google.cloud.helix.common.HelixFutures.get(HelixFutures.java:42)\\\\n\\\\t\\\\tat com.google.cloud.helix.server.metadata.DatasetTracker.lookupDataset(DatasetTracker.java:152)\\\\n\\\\t\\\\tat com.google.cloud.helix.server.rosy.GetDatasetImpl.getDatasetInternal(GetDatasetImpl.java:60)\\\\n\\\\t\\\\tat com.google.cloud.helix.server.rosy.GetDatasetImpl.getDataset(GetDatasetImpl.java:38)\\\\n\\\\t\\\\tat com.google.cloud.helix.server.rosy.StorageStubby.getDataset(StorageStubby.java:504)\\\\n\\\\t\\\\tat com.google.cloud.helix.proto2.Storage$ServiceParameters$81.handleBlockingRequest(Storage.java:4539)\\\\n\\\\t\\\\tat com.google.cloud.helix.proto2.Storage$ServiceParameters$81.handleBlockingRequest(Storage.java:4537)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcBlockingApplicationHandler.handleRequest(RpcBlockingApplicationHandler.java:28)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$RpcApplicationHandlerAdaptor.handleRequest(RpcServerInterceptor2Util.java:82)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.AggregatedRpcServerInterceptors.interceptRpc(AggregatedRpcServerInterceptors.java:100)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\\\n\\\\t\\\\tat com.google.prod.fireaxe.filters.FireaxeRpcServerInterceptorImpl.interceptRpc(FireaxeRpcServerInterceptorImpl.java:68)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\\\n\\\\t\\\\tat com.google.cloud.helix.common.isolation.IsolationServerInterceptor.checkIsolationThrottlingAndContinueHandling(IsolationServerInterceptor.java:162)\\\\n\\\\t\\\\tat com.google.cloud.helix.common.isolation.IsolationServerInterceptor.interceptRpc(IsolationServerInterceptor.java:113)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplicationWithCancellation(RpcServerInternalContext.java:693)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.lambda$runRpcInApplication$0(RpcServerInternalContext.java:658)\\\\n\\\\t\\\\tat io.grpc.Context.run(Context.java:536)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplication(RpcServerInternalContext.java:658)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerChannel$4.apply(RpcServerChannel.java:1102)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerChannel$4.apply(RpcServerChannel.java:1060)\\\\n\\\\t\\\\tat 
com.google.common.util.concurrent.AbstractTransformFuture$TransformFuture.doTransform(AbstractTransformFuture.java:259)\\\\n\\\\t\\\\tat com.google.common.util.concurrent.AbstractTransformFuture$TransformFuture.doTransform(AbstractTransformFuture.java:249)\\\\n\\\\t\\\\tat com.google.common.util.concurrent.AbstractTransformFuture.run(AbstractTransformFuture.java:130)\\\\n\\\\t\\\\tat com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\\\\n\\\\t\\\\tat com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:80)\\\\n\\\\t\\\\tat com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\\\\n\\\\t\\\\tat com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\\\\n\\\\t\\\\tat io.grpc.Context.run(Context.java:536)\\\\n\\\\t\\\\tat com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:78)\\\\n\\\\t\\\\tat com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\\\\n\\\\t\\\\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)\\\\n\\\\t\\\\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)\\\\n\\\\t\\\\t... 1 more\\\\n\\\";AppErrorCode=2;StartTimeMs=1729044292646;unknown;ResFormat=uncompressed;ServerTimeSec=0.006606848;LogBytes=256;Non-FailFast;EffSecLevel=none;ReqFormat=uncompressed;ReqID=e282aacfbcda1ddc;GlobalID=0;Server=[2002:a05:6845:9603:b0:3d:63a8:3b9e]:4001 debug=time: 2024-10-15T19:04:52.652-07:00 errorProto=code: \\\"NOT_FOUND\\\"\\nargument: \\\"Dataset\\\"\\nargument: \\\"lookerdata:cdc\\\"\\n\\n\\t\\tat com.google.cloud.helix.server.common.StubbyUtil.createHelixException(StubbyUtil.java:229)\\n\\t\\tat com.google.cloud.helix.server.metadata.lookup.RpcDatasetMetadataLookup.resolveDataset(RpcDatasetMetadataLookup.java:82)\\n\\t\\tat com.google.cloud.helix.server.auth.QueryEntityReferenceConverter.normalizeIfNeeded(QueryEntityReferenceConverter.java:201)\\n\\t\\t... 
36 more\\n\\tCaused by: com.google.net.rpc3.client.RpcClientException: APPLICATION_ERROR;cloud.helix/Storage.GetDataset;domain: \\\"cloud.helix.ErrorDomain\\\" code: \\\"NOT_FOUND\\\" argument: \\\"Dataset\\\" argument: \\\"lookerdata:cdc\\\" debug_info: \\\"[NOT_FOUND] message=Dataset lookerdata:cdc not found debug=time: 2024-10-15T19:04:52.652-07:00 errorProto=code: \\\\\\\"NOT_FOUND\\\\\\\"\\\\nargument: \\\\\\\"Dataset\\\\\\\"\\\\nargument: \\\\\\\"lookerdata:cdc\\\\\\\"\\\\n\\\\n\\\\tat com.google.cloud.helix.common.Exceptions$Public.resourceNotFound(Exceptions.java:292)\\\\n\\\\tat com.google.cloud.helix.common.Exceptions$Public.resourceNotFound(Exceptions.java:296)\\\\n\\\\tat com.google.cloud.helix.server.metadata.DatasetTrackerSpanner.lambda$getDatasetEntityAsync$0(DatasetTrackerSpanner.java:705)\\\\n\\\\tat com.google.common.util.concurrent.AbstractTransformFuture$AsyncTransformFuture.doTransform(AbstractTransformFuture.java:230)\\\\n\\\\tat com.google.common.util.concurrent.AbstractTransformFuture$AsyncTransformFuture.doTransform(AbstractTransformFuture.java:217)\\\\n\\\\tat com.google.common.util.concurrent.AbstractTransformFuture.run(AbstractTransformFuture.java:130)\\\\n\\\\tat com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\\\\n\\\\tat io.grpc.Context.run(Context.java:536)\\\\n\\\\tat com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:78)\\\\n\\\\tat com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\\\\n\\\\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)\\\\n\\\\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)\\\\n\\\\tat com.google.cloud.helix.common.ThreadPoolFactory.lambda$defaultThreadFactory$2(ThreadPoolFactory.java:143)\\\\n\\\\tat java.base/java.lang.Thread.run(Unknown Source)\\\\n\\\\tSuppressed: java.lang.Exception: Including call stack from HelixFutures\\\\n\\\\t\\\\tat com.google.cloud.helix.common.HelixFutures.getHelixException(HelixFutures.java:76)\\\\n\\\\t\\\\tat com.google.cloud.helix.common.HelixFutures.get(HelixFutures.java:42)\\\\n\\\\t\\\\tat com.google.cloud.helix.server.metadata.DatasetTracker.lookupDataset(DatasetTracker.java:152)\\\\n\\\\t\\\\tat com.google.cloud.helix.server.rosy.GetDatasetImpl.getDatasetInternal(GetDatasetImpl.java:60)\\\\n\\\\t\\\\tat com.google.cloud.helix.server.rosy.GetDatasetImpl.getDataset(GetDatasetImpl.java:38)\\\\n\\\\t\\\\tat com.google.cloud.helix.server.rosy.StorageStubby.getDataset(StorageStubby.java:504)\\\\n\\\\t\\\\tat com.google.cloud.helix.proto2.Storage$ServiceParameters$81.handleBlockingRequest(Storage.java:4539)\\\\n\\\\t\\\\tat com.google.cloud.helix.proto2.Storage$ServiceParameters$81.handleBlockingRequest(Storage.java:4537)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcBlockingApplicationHandler.handleRequest(RpcBlockingApplicationHandler.java:28)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$RpcApplicationHandlerAdaptor.handleRequest(RpcServerInterceptor2Util.java:82)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.AggregatedRpcServerInterceptors.interceptRpc(AggregatedRpcServerInterceptors.java:100)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\\\n\\\\t\\\\tat com.google.prod.fireaxe.filters.FireaxeRpcServerInterceptorImpl.interceptRpc(FireaxeRpcServerInterceptorImpl.java:68)\\\\n\\\\t\\\\tat 
com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\\\n\\\\t\\\\tat com.google.cloud.helix.common.isolation.IsolationServerInterceptor.checkIsolationThrottlingAndContinueHandling(IsolationServerInterceptor.java:162)\\\\n\\\\t\\\\tat com.google.cloud.helix.common.isolation.IsolationServerInterceptor.interceptRpc(IsolationServerInterceptor.java:113)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplicationWithCancellation(RpcServerInternalContext.java:693)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.lambda$runRpcInApplication$0(RpcServerInternalContext.java:658)\\\\n\\\\t\\\\tat io.grpc.Context.run(Context.java:536)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplication(RpcServerInternalContext.java:658)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerChannel$4.apply(RpcServerChannel.java:1102)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerChannel$4.apply(RpcServerChannel.java:1060)\\\\n\\\\t\\\\tat com.google.common.util.concurrent.AbstractTransformFuture$TransformFuture.doTransform(AbstractTransformFuture.java:259)\\\\n\\\\t\\\\tat com.google.common.util.concurrent.AbstractTransformFuture$TransformFuture.doTransform(AbstractTransformFuture.java:249)\\\\n\\\\t\\\\tat com.google.common.util.concurrent.AbstractTransformFuture.run(AbstractTransformFuture.java:130)\\\\n\\\\t\\\\tat com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\\\\n\\\\t\\\\tat com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:80)\\\\n\\\\t\\\\tat com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\\\\n\\\\t\\\\tat com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\\\\n\\\\t\\\\tat io.grpc.Context.run(Context.java:536)\\\\n\\\\t\\\\tat com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:78)\\\\n\\\\t\\\\tat com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\\\\n\\\\t\\\\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)\\\\n\\\\t\\\\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)\\\\n\\\\t\\\\t... 1 more\\\\n\\\";AppErrorCode=2;StartTimeMs=1729044292646;unknown;ResFormat=uncompressed;ServerTimeSec=0.006606848;LogBytes=256;Non-FailFast;EffSecLevel=none;ReqFormat=uncompressed;ReqID=e282aacfbcda1ddc;GlobalID=0;Server=[2002:a05:6845:9603:b0:3d:63a8:3b9e]:4001\\n\\t\\tat com.google.net.rpc3.client.RpcStub.startBlockingRpcInternal(RpcStub.java:571)\\n\\t\\tat com.google.net.rpc3.client.RpcStub.startBlockingRpc(RpcStub.java:471)\\n\\t\\tat com.google.cloud.helix.proto2.Storage$Stub.getDataset(Storage.java:1349)\\n\\t\\tat com.google.cloud.helix.server.metadata.lookup.RpcDatasetMetadataLookup.resolveDataset(RpcDatasetMetadataLookup.java:55)\\n\\t\\t... 
37 more\\n\\tSuppressed: [ACCESS_DENIED] debug=User 768386550392: IAM access denied errorProto=code: \\\"ACCESS_DENIED\\\"\\nargument: \\\"Dataset\\\"\\nargument: \\\"lookerdata:cdc\\\"\\nargument: \\\"Permission bigquery.datasets.get denied on dataset lookerdata:cdc (or it may not exist).\\\"\\n\\n\\t\\tat com.google.cloud.helix.common.Exceptions$Public.accessDenied(Exceptions.java:574)\\n\\t\\tat com.google.cloud.helix.common.auth.AuthorizerExceptions$Public.createPermissionDeny(AuthorizerExceptions.java:262)\\n\\t\\tat com.google.cloud.helix.common.auth.AuthorizerExceptions$Public.datasetAccessDenied(AuthorizerExceptions.java:156)\\n\\t\\tat com.google.cloud.helix.server.auth.IamAuthorizer.lambda$authorizeDatasetInternal$4(IamAuthorizer.java:1194)\\n\\t\\tat com.google.common.util.concurrent.AbstractTransformFuture$TransformFuture.doTransform(AbstractTransformFuture.java:259)\\n\\t\\tat com.google.common.util.concurrent.AbstractTransformFuture$TransformFuture.doTransform(AbstractTransformFuture.java:249)\\n\\t\\tat com.google.common.util.concurrent.AbstractTransformFuture.run(AbstractTransformFuture.java:130)\\n\\t\\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Unknown Source)\\n\\t\\tat com.google.common.util.concurrent.TrustedListenableFutureTask$TrustedFutureInterruptibleTask.runInterruptibly(TrustedListenableFutureTask.java:135)\\n\\t\\tat com.google.common.util.concurrent.InterruptibleTask.run(InterruptibleTask.java:77)\\n\\t\\tat com.google.common.util.concurrent.TrustedListenableFutureTask.run(TrustedListenableFutureTask.java:86)\\n\\t\\t... 7 more\\n\";AppErrorCode=4;StartTimeMs=1729044292643;unknown;Deadline(sec)=60.0;ResFormat=uncompressed;ServerTimeSec=0.174171421;LogBytes=256;FailFast;EffSecLevel=privacy_and_integrity;ReqFormat=uncompressed;ReqID=1d5a3c9d34b06964;GlobalID=0;Server=[2002:a05:6845:6018:b0:3d:366e:f847]:4001 errorProto=code: \"ACCESS_DENIED\"\nargument: \"Table\"\nargument: \"lookerdata:cdc.project_tycho_reports\"\nargument: \"User does not have permission to query table lookerdata:cdc.project_tycho_reports, or perhaps it does not exist.\"\n\n\tat com.google.cloud.helix.server.common.StubbyUtil.createHelixException(StubbyUtil.java:229)\n\tat com.google.cloud.helix.common.auth.client.AuthorizationServiceClientImpl.authorizeQuery(AuthorizationServiceClientImpl.java:350)\n\tat com.google.cloud.helix.server.auth.service.authorizers.QueryAuthorizer.callAuthServer(QueryAuthorizer.java:415)\n\tat com.google.cloud.helix.server.auth.service.authorizers.QueryAuthorizer.lambda$authorizeQuery$2(QueryAuthorizer.java:183)\n\tat com.google.common.util.concurrent.TrustedListenableFutureTask$TrustedFutureInterruptibleTask.runInterruptibly(TrustedListenableFutureTask.java:135)\n\tat com.google.common.util.concurrent.InterruptibleTask.run(InterruptibleTask.java:77)\n\tat com.google.common.util.concurrent.TrustedListenableFutureTask.run(TrustedListenableFutureTask.java:86)\n\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Unknown Source)\n\tat com.google.common.util.concurrent.TrustedListenableFutureTask$TrustedFutureInterruptibleTask.runInterruptibly(TrustedListenableFutureTask.java:135)\n\tat com.google.common.util.concurrent.InterruptibleTask.run(InterruptibleTask.java:77)\n\tat com.google.common.util.concurrent.TrustedListenableFutureTask.run(TrustedListenableFutureTask.java:86)\n\tat com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\n\tat io.grpc.Context.run(Context.java:536)\n\tat 
com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:78)\n\tat com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\n\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)\n\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)\n\tat java.base/java.lang.Thread.run(Unknown Source)\n\tSuppressed: java.lang.Exception: Including call stack from HelixFutures\n\t\tat com.google.cloud.helix.common.HelixFutures.getHelixException(HelixFutures.java:76)\n\t\tat com.google.cloud.helix.common.HelixFutures.get(HelixFutures.java:42)\n\t\tat com.google.cloud.helix.server.auth.service.authorizers.QueryAuthorizer.authorizeQuery(QueryAuthorizer.java:194)\n\t\tat com.google.cloud.helix.server.job.ReferenceCollector.authorizeEntitiesRead(ReferenceCollector.java:1640)\n\t\tat com.google.cloud.helix.server.job.ReferenceCollector.authorizeEntitiesRead(ReferenceCollector.java:1626)\n\t\tat com.google.cloud.helix.server.job.ReferenceCollector.getReferenceTree(ReferenceCollector.java:575)\n\t\tat com.google.cloud.helix.server.job.CatalogMetadataResolver.resolve(CatalogMetadataResolver.java:150)\n\t\tat com.google.cloud.helix.server.job.QueryAnalyzer.addTables(QueryAnalyzer.java:948)\n\t\tat com.google.cloud.helix.server.job.QueryAnalyzer.parseReferencedTables(QueryAnalyzer.java:4298)\n\t\tat com.google.cloud.helix.server.job.QueryAnalyzer.buildQueryInfo(QueryAnalyzer.java:3603)\n\t\tat com.google.cloud.helix.server.job.LocalQueryJobController.loadQueryInfoInternal(LocalQueryJobController.java:4014)\n\t\tat com.google.cloud.helix.server.job.LocalQueryJobController.loadQueryInfo(LocalQueryJobController.java:4089)\n\t\tat com.google.cloud.helix.server.job.LocalQueryJobController.checkInternal(LocalQueryJobController.java:4704)\n\t\tat com.google.cloud.helix.server.job.LocalQueryJobController.checkAsync(LocalQueryJobController.java:4620)\n\t\tat com.google.cloud.helix.server.job.LocalSqlJobController.checkAsync(LocalSqlJobController.java:129)\n\t\tat com.google.cloud.helix.server.job.LocalJobController.check(LocalJobController.java:1503)\n\t\tat com.google.cloud.helix.server.job.JobControllerModule$1.check(JobControllerModule.java:831)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine$1.check(JobStateMachine.java:3794)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.checkInternal(JobStateMachine.java:3063)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.lambda$runJob$8(JobStateMachine.java:2713)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.runState(JobStateMachine.java:2941)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.runJob(JobStateMachine.java:2713)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.execute(JobStateMachine.java:2602)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insertNormalizedJob(HelixJobRosy.java:1728)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insertJobInternalWithoutQuota(HelixJobRosy.java:2380)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insertJobInternal(HelixJobRosy.java:2270)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insertInternal(HelixJobRosy.java:2409)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insertRequestInternal(HelixJobRosy.java:3909)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insert(HelixJobRosy.java:3880)\n\t\tat java.base/jdk.internal.reflect.DirectMethodHandleAccessor.invoke(Unknown Source)\n\t\tat 
java.base/java.lang.reflect.Method.invoke(Unknown Source)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$innerContinuation$3(RpcRequestProxy.java:435)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestDapperHookFactory$TracingRequestHook.call(RosyRequestDapperHookFactory.java:88)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestCredsHookFactory$1.call(RosyRequestCredsHookFactory.java:56)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestConcurrentCallsHookFactory$Hook.call(RosyRequestConcurrentCallsHookFactory.java:101)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestVarzHookFactory$Hook.call(RosyRequestVarzHookFactory.java:464)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestAuditHookFactory$1.call(RosyRequestAuditHookFactory.java:107)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RequestSecurityExtensionForGwsHookFactory$1.call(RequestSecurityExtensionForGwsHookFactory.java:69)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestSecurityContextHookFactory$1.call(RosyRequestSecurityContextHookFactory.java:83)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestContextHookFactory.call(RosyRequestContextHookFactory.java:63)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.oneplatform.common.OnePlatformRosyHookModule$MendelIdLogger$1.call(OnePlatformRosyHookModule.java:123)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.invoke(RpcRequestProxy.java:666)\n\t\tat jdk.proxy2/jdk.proxy2.$Proxy52.insert(Unknown Source)\n\t\tat com.google.cloud.helix.proto.proto2api.HelixJobService$ServiceParameters$1.handleRequest(HelixJobService.java:917)\n\t\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$RpcApplicationHandlerAdaptor.handleRequest(RpcServerInterceptor2Util.java:82)\n\t\tat com.google.net.rpc3.impl.server.AggregatedRpcServerInterceptors.interceptRpc(AggregatedRpcServerInterceptors.java:100)\n\t\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\n\t\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplicationWithCancellation(RpcServerInternalContext.java:693)\n\t\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.lambda$runRpcInApplication$0(RpcServerInternalContext.java:658)\n\t\tat io.grpc.Context.run(Context.java:536)\n\t\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplication(RpcServerInternalContext.java:658)\n\t\tat 
com.google.net.rpc3.util.RpcInProcessConnector$ServerInternalContext.lambda$runWithExecutor$1(RpcInProcessConnector.java:1995)\n\t\t... 7 more\n\tSuppressed: [NOT_FOUND] message=Dataset lookerdata:cdc not found debug=time: 2024-10-15T19:04:52.643-07:00 errorProto=code: \"NOT_FOUND\"\nargument: \"Dataset\"\nargument: \"lookerdata:cdc\"\n\n\t\tat com.google.cloud.helix.server.job.CrossRegionDatasetResolver.resolve(CrossRegionDatasetResolver.java:162)\n\t\tat com.google.cloud.helix.server.job.ReferenceCollector.addTablesAndRoutinesToCache(ReferenceCollector.java:1380)\n\t\tat com.google.cloud.helix.server.job.ReferenceCollector.getReferenceTree(ReferenceCollector.java:560)\n\t\tat com.google.cloud.helix.server.job.CatalogMetadataResolver.resolve(CatalogMetadataResolver.java:150)\n\t\tat com.google.cloud.helix.server.job.QueryAnalyzer.addTables(QueryAnalyzer.java:948)\n\t\tat com.google.cloud.helix.server.job.QueryAnalyzer.parseReferencedTables(QueryAnalyzer.java:4298)\n\t\tat com.google.cloud.helix.server.job.QueryAnalyzer.buildQueryInfo(QueryAnalyzer.java:3603)\n\t\tat com.google.cloud.helix.server.job.LocalQueryJobController.loadQueryInfoInternal(LocalQueryJobController.java:4014)\n\t\tat com.google.cloud.helix.server.job.LocalQueryJobController.loadQueryInfo(LocalQueryJobController.java:4089)\n\t\tat com.google.cloud.helix.server.job.LocalQueryJobController.checkInternal(LocalQueryJobController.java:4704)\n\t\tat com.google.cloud.helix.server.job.LocalQueryJobController.checkAsync(LocalQueryJobController.java:4620)\n\t\tat com.google.cloud.helix.server.job.LocalSqlJobController.checkAsync(LocalSqlJobController.java:129)\n\t\tat com.google.cloud.helix.server.job.LocalJobController.check(LocalJobController.java:1503)\n\t\tat com.google.cloud.helix.server.job.JobControllerModule$1.check(JobControllerModule.java:831)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine$1.check(JobStateMachine.java:3794)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.checkInternal(JobStateMachine.java:3063)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.lambda$runJob$8(JobStateMachine.java:2713)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.runState(JobStateMachine.java:2941)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.runJob(JobStateMachine.java:2713)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.execute(JobStateMachine.java:2602)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insertNormalizedJob(HelixJobRosy.java:1728)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insertJobInternalWithoutQuota(HelixJobRosy.java:2380)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insertJobInternal(HelixJobRosy.java:2270)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insertInternal(HelixJobRosy.java:2409)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insertRequestInternal(HelixJobRosy.java:3909)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insert(HelixJobRosy.java:3880)\n\t\tat java.base/jdk.internal.reflect.DirectMethodHandleAccessor.invoke(Unknown Source)\n\t\tat java.base/java.lang.reflect.Method.invoke(Unknown Source)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$innerContinuation$3(RpcRequestProxy.java:435)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestDapperHookFactory$TracingRequestHook.call(RosyRequestDapperHookFactory.java:88)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat 
com.google.cloud.helix.common.rosy.RosyRequestCredsHookFactory$1.call(RosyRequestCredsHookFactory.java:56)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestConcurrentCallsHookFactory$Hook.call(RosyRequestConcurrentCallsHookFactory.java:101)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestVarzHookFactory$Hook.call(RosyRequestVarzHookFactory.java:464)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestAuditHookFactory$1.call(RosyRequestAuditHookFactory.java:107)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RequestSecurityExtensionForGwsHookFactory$1.call(RequestSecurityExtensionForGwsHookFactory.java:69)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestSecurityContextHookFactory$1.call(RosyRequestSecurityContextHookFactory.java:83)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestContextHookFactory.call(RosyRequestContextHookFactory.java:63)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.oneplatform.common.OnePlatformRosyHookModule$MendelIdLogger$1.call(OnePlatformRosyHookModule.java:123)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.invoke(RpcRequestProxy.java:666)\n\t\tat jdk.proxy2/jdk.proxy2.$Proxy52.insert(Unknown Source)\n\t\tat com.google.cloud.helix.proto.proto2api.HelixJobService$ServiceParameters$1.handleRequest(HelixJobService.java:917)\n\t\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$RpcApplicationHandlerAdaptor.handleRequest(RpcServerInterceptor2Util.java:82)\n\t\tat com.google.net.rpc3.impl.server.AggregatedRpcServerInterceptors.interceptRpc(AggregatedRpcServerInterceptors.java:100)\n\t\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\n\t\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplicationWithCancellation(RpcServerInternalContext.java:693)\n\t\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.lambda$runRpcInApplication$0(RpcServerInternalContext.java:658)\n\t\tat io.grpc.Context.run(Context.java:536)\n\t\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplication(RpcServerInternalContext.java:658)\n\t\tat com.google.net.rpc3.util.RpcInProcessConnector$ServerInternalContext.lambda$runWithExecutor$1(RpcInProcessConnector.java:1995)\n\t\t... 
7 more\n\tSuppressed: java.lang.Exception: Including call stack from HelixFutures\n\t\tat com.google.cloud.helix.common.HelixFutures.getHelixException(HelixFutures.java:76)\n\t\tat com.google.cloud.helix.common.HelixFutures.getDone(HelixFutures.java:55)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.handleCheckDone(JobStateMachine.java:3088)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.handleCheckDoneInSpan(JobStateMachine.java:3077)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.lambda$checkInternal$16(JobStateMachine.java:3067)\n\t\tat com.google.common.util.concurrent.CombinedFuture$CallableInterruptibleTask.runInterruptibly(CombinedFuture.java:198)\n\t\tat com.google.common.util.concurrent.InterruptibleTask.run(InterruptibleTask.java:77)\n\t\tat com.google.common.util.concurrent.DirectExecutor.execute(DirectExecutor.java:32)\n\t\tat com.google.common.util.concurrent.CombinedFuture$CombinedFutureInterruptibleTask.execute(CombinedFuture.java:110)\n\t\tat com.google.common.util.concurrent.CombinedFuture.handleAllCompleted(CombinedFuture.java:67)\n\t\tat com.google.common.util.concurrent.AggregateFuture.processCompleted(AggregateFuture.java:317)\n\t\tat com.google.common.util.concurrent.AggregateFuture.decrementCountAndMaybeComplete(AggregateFuture.java:299)\n\t\tat com.google.common.util.concurrent.AggregateFuture.init(AggregateFuture.java:174)\n\t\tat com.google.common.util.concurrent.CombinedFuture.\u003cinit\u003e(CombinedFuture.java:57)\n\t\tat com.google.common.util.concurrent.Futures$FutureCombiner.call(Futures.java:883)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.checkInternal(JobStateMachine.java:3066)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.lambda$runJob$8(JobStateMachine.java:2713)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.runState(JobStateMachine.java:2941)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.runJob(JobStateMachine.java:2713)\n\t\tat com.google.cloud.helix.server.job.JobStateMachine.execute(JobStateMachine.java:2602)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insertNormalizedJob(HelixJobRosy.java:1728)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insertJobInternalWithoutQuota(HelixJobRosy.java:2380)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insertJobInternal(HelixJobRosy.java:2270)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insertInternal(HelixJobRosy.java:2409)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insertRequestInternal(HelixJobRosy.java:3909)\n\t\tat com.google.cloud.helix.server.job.rosy.HelixJobRosy.insert(HelixJobRosy.java:3880)\n\t\tat java.base/jdk.internal.reflect.DirectMethodHandleAccessor.invoke(Unknown Source)\n\t\tat java.base/java.lang.reflect.Method.invoke(Unknown Source)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$innerContinuation$3(RpcRequestProxy.java:435)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestDapperHookFactory$TracingRequestHook.call(RosyRequestDapperHookFactory.java:88)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestCredsHookFactory$1.call(RosyRequestCredsHookFactory.java:56)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat 
com.google.cloud.helix.common.rosy.RosyRequestConcurrentCallsHookFactory$Hook.call(RosyRequestConcurrentCallsHookFactory.java:101)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestVarzHookFactory$Hook.call(RosyRequestVarzHookFactory.java:464)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestAuditHookFactory$1.call(RosyRequestAuditHookFactory.java:107)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RequestSecurityExtensionForGwsHookFactory$1.call(RequestSecurityExtensionForGwsHookFactory.java:69)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestSecurityContextHookFactory$1.call(RosyRequestSecurityContextHookFactory.java:83)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RosyRequestContextHookFactory.call(RosyRequestContextHookFactory.java:63)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.oneplatform.common.OnePlatformRosyHookModule$MendelIdLogger$1.call(OnePlatformRosyHookModule.java:123)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.lambda$makeContinuation$4(RpcRequestProxy.java:461)\n\t\tat com.google.cloud.helix.common.rosy.RpcRequestProxy.invoke(RpcRequestProxy.java:666)\n\t\tat jdk.proxy2/jdk.proxy2.$Proxy52.insert(Unknown Source)\n\t\tat com.google.cloud.helix.proto.proto2api.HelixJobService$ServiceParameters$1.handleRequest(HelixJobService.java:917)\n\t\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$RpcApplicationHandlerAdaptor.handleRequest(RpcServerInterceptor2Util.java:82)\n\t\tat com.google.net.rpc3.impl.server.AggregatedRpcServerInterceptors.interceptRpc(AggregatedRpcServerInterceptors.java:100)\n\t\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\n\t\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplicationWithCancellation(RpcServerInternalContext.java:693)\n\t\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.lambda$runRpcInApplication$0(RpcServerInternalContext.java:658)\n\t\tat io.grpc.Context.run(Context.java:536)\n\t\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplication(RpcServerInternalContext.java:658)\n\t\tat com.google.net.rpc3.util.RpcInProcessConnector$ServerInternalContext.lambda$runWithExecutor$1(RpcInProcessConnector.java:1995)\n\t\t... 
7 more\nCaused by: com.google.net.rpc3.client.RpcClientException: APPLICATION_ERROR;cloud.helix/Authorization.AuthorizeQuery;domain: \"cloud.helix.ErrorDomain\" code: \"ACCESS_DENIED\" argument: \"Table\" argument: \"lookerdata:cdc.project_tycho_reports\" argument: \"User does not have permission to query table lookerdata:cdc.project_tycho_reports, or perhaps it does not exist.\" debug_info: \"[ACCESS_DENIED] errorProto=code: \\\"ACCESS_DENIED\\\"\\nargument: \\\"Table\\\"\\nargument: \\\"lookerdata:cdc.project_tycho_reports\\\"\\nargument: \\\"User does not have permission to query table lookerdata:cdc.project_tycho_reports, or perhaps it does not exist.\\\"\\n\\n\\tat com.google.cloud.helix.common.Exceptions$Public.accessDenied(Exceptions.java:574)\\n\\tat com.google.cloud.helix.common.Exceptions$Public.accessDenied(Exceptions.java:615)\\n\\tat com.google.cloud.helix.common.Exceptions$Public.accessDenied(Exceptions.java:619)\\n\\tat com.google.cloud.helix.common.UserTableReference.buildAccessDeniedException(UserTableReference.java:79)\\n\\tat com.google.cloud.helix.server.auth.QueryEntityReferenceConverter.authorizeMissingDataset(QueryEntityReferenceConverter.java:257)\\n\\tat com.google.cloud.helix.server.auth.QueryEntityReferenceConverter.normalizeIfNeeded(QueryEntityReferenceConverter.java:206)\\n\\tat com.google.cloud.helix.server.auth.QueryEntityReferenceConverter.convertTable(QueryEntityReferenceConverter.java:117)\\n\\tat com.google.cloud.helix.server.auth.QueryEntityReferenceConverter.convert(QueryEntityReferenceConverter.java:93)\\n\\tat com.google.cloud.helix.server.auth.service.AuthorizationService.lambda$authorizeQuery$1(AuthorizationService.java:444)\\n\\tat com.google.cloud.helix.server.common.TreeNode.transform(TreeNode.java:94)\\n\\tat com.google.cloud.helix.server.common.TreeNode.transform(TreeNode.java:96)\\n\\tat com.google.cloud.helix.server.auth.service.AuthorizationService.lambda$authorizeQuery$0(AuthorizationService.java:444)\\n\\tat com.google.cloud.helix.server.auth.service.AuthorizationService.runWithSecurityContext(AuthorizationService.java:1082)\\n\\tat com.google.cloud.helix.server.auth.service.AuthorizationService.authorizeQuery(AuthorizationService.java:395)\\n\\tat com.google.cloud.helix.proto2.Authorization$ServiceParameters$2.handleRequest(Authorization.java:511)\\n\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$RpcApplicationHandlerAdaptor.handleRequest(RpcServerInterceptor2Util.java:82)\\n\\tat com.google.net.rpc3.impl.server.AggregatedRpcServerInterceptors.interceptRpc(AggregatedRpcServerInterceptors.java:100)\\n\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\n\\tat com.google.prod.fireaxe.filters.FireaxeRpcServerInterceptorImpl.interceptRpc(FireaxeRpcServerInterceptorImpl.java:68)\\n\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\n\\tat com.google.cloud.helix.common.isolation.IsolationServerInterceptor.checkIsolationThrottlingAndContinueHandling(IsolationServerInterceptor.java:162)\\n\\tat com.google.cloud.helix.common.isolation.IsolationServerInterceptor.interceptRpc(IsolationServerInterceptor.java:113)\\n\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\n\\tat 
com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplicationWithCancellation(RpcServerInternalContext.java:693)\\n\\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.lambda$runRpcInApplication$0(RpcServerInternalContext.java:658)\\n\\tat io.grpc.Context.run(Context.java:536)\\n\\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplication(RpcServerInternalContext.java:658)\\n\\tat com.google.net.rpc3.impl.server.RpcServerChannel$4.apply(RpcServerChannel.java:1102)\\n\\tat com.google.net.rpc3.impl.server.RpcServerChannel$4.apply(RpcServerChannel.java:1060)\\n\\tat com.google.common.util.concurrent.AbstractTransformFuture$TransformFuture.doTransform(AbstractTransformFuture.java:259)\\n\\tat com.google.common.util.concurrent.AbstractTransformFuture$TransformFuture.doTransform(AbstractTransformFuture.java:249)\\n\\tat com.google.common.util.concurrent.AbstractTransformFuture.run(AbstractTransformFuture.java:130)\\n\\tat com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\\n\\tat com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:80)\\n\\tat com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\\n\\tat com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\\n\\tat io.grpc.Context.run(Context.java:536)\\n\\tat com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:78)\\n\\tat com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\\n\\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)\\n\\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)\\n\\tat java.base/java.lang.Thread.run(Unknown Source)\\n\\tSuppressed: [NOT_FOUND] message=com.google.net.rpc3.client.RpcClientException: APPLICATION_ERROR;cloud.helix/Storage.GetDataset;domain: \\\"cloud.helix.ErrorDomain\\\" code: \\\"NOT_FOUND\\\" argument: \\\"Dataset\\\" argument: \\\"lookerdata:cdc\\\" debug_info: \\\"[NOT_FOUND] message=Dataset lookerdata:cdc not found debug=time: 2024-10-15T19:04:52.652-07:00 errorProto=code: \\\\\\\"NOT_FOUND\\\\\\\"\\\\nargument: \\\\\\\"Dataset\\\\\\\"\\\\nargument: \\\\\\\"lookerdata:cdc\\\\\\\"\\\\n\\\\n\\\\tat com.google.cloud.helix.common.Exceptions$Public.resourceNotFound(Exceptions.java:292)\\\\n\\\\tat com.google.cloud.helix.common.Exceptions$Public.resourceNotFound(Exceptions.java:296)\\\\n\\\\tat com.google.cloud.helix.server.metadata.DatasetTrackerSpanner.lambda$getDatasetEntityAsync$0(DatasetTrackerSpanner.java:705)\\\\n\\\\tat com.google.common.util.concurrent.AbstractTransformFuture$AsyncTransformFuture.doTransform(AbstractTransformFuture.java:230)\\\\n\\\\tat com.google.common.util.concurrent.AbstractTransformFuture$AsyncTransformFuture.doTransform(AbstractTransformFuture.java:217)\\\\n\\\\tat com.google.common.util.concurrent.AbstractTransformFuture.run(AbstractTransformFuture.java:130)\\\\n\\\\tat com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\\\\n\\\\tat io.grpc.Context.run(Context.java:536)\\\\n\\\\tat com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:78)\\\\n\\\\tat com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\\\\n\\\\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)\\\\n\\\\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)\\\\n\\\\tat 
com.google.cloud.helix.common.ThreadPoolFactory.lambda$defaultThreadFactory$2(ThreadPoolFactory.java:143)\\\\n\\\\tat java.base/java.lang.Thread.run(Unknown Source)\\\\n\\\\tSuppressed: java.lang.Exception: Including call stack from HelixFutures\\\\n\\\\t\\\\tat com.google.cloud.helix.common.HelixFutures.getHelixException(HelixFutures.java:76)\\\\n\\\\t\\\\tat com.google.cloud.helix.common.HelixFutures.get(HelixFutures.java:42)\\\\n\\\\t\\\\tat com.google.cloud.helix.server.metadata.DatasetTracker.lookupDataset(DatasetTracker.java:152)\\\\n\\\\t\\\\tat com.google.cloud.helix.server.rosy.GetDatasetImpl.getDatasetInternal(GetDatasetImpl.java:60)\\\\n\\\\t\\\\tat com.google.cloud.helix.server.rosy.GetDatasetImpl.getDataset(GetDatasetImpl.java:38)\\\\n\\\\t\\\\tat com.google.cloud.helix.server.rosy.StorageStubby.getDataset(StorageStubby.java:504)\\\\n\\\\t\\\\tat com.google.cloud.helix.proto2.Storage$ServiceParameters$81.handleBlockingRequest(Storage.java:4539)\\\\n\\\\t\\\\tat com.google.cloud.helix.proto2.Storage$ServiceParameters$81.handleBlockingRequest(Storage.java:4537)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcBlockingApplicationHandler.handleRequest(RpcBlockingApplicationHandler.java:28)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$RpcApplicationHandlerAdaptor.handleRequest(RpcServerInterceptor2Util.java:82)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.AggregatedRpcServerInterceptors.interceptRpc(AggregatedRpcServerInterceptors.java:100)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\\\n\\\\t\\\\tat com.google.prod.fireaxe.filters.FireaxeRpcServerInterceptorImpl.interceptRpc(FireaxeRpcServerInterceptorImpl.java:68)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\\\n\\\\t\\\\tat com.google.cloud.helix.common.isolation.IsolationServerInterceptor.checkIsolationThrottlingAndContinueHandling(IsolationServerInterceptor.java:162)\\\\n\\\\t\\\\tat com.google.cloud.helix.common.isolation.IsolationServerInterceptor.interceptRpc(IsolationServerInterceptor.java:113)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplicationWithCancellation(RpcServerInternalContext.java:693)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.lambda$runRpcInApplication$0(RpcServerInternalContext.java:658)\\\\n\\\\t\\\\tat io.grpc.Context.run(Context.java:536)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplication(RpcServerInternalContext.java:658)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerChannel$4.apply(RpcServerChannel.java:1102)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerChannel$4.apply(RpcServerChannel.java:1060)\\\\n\\\\t\\\\tat com.google.common.util.concurrent.AbstractTransformFuture$TransformFuture.doTransform(AbstractTransformFuture.java:259)\\\\n\\\\t\\\\tat com.google.common.util.concurrent.AbstractTransformFuture$TransformFuture.doTransform(AbstractTransformFuture.java:249)\\\\n\\\\t\\\\tat com.google.common.util.concurrent.AbstractTransformFuture.run(AbstractTransformFuture.java:130)\\\\n\\\\t\\\\tat 
com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\\\\n\\\\t\\\\tat com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:80)\\\\n\\\\t\\\\tat com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\\\\n\\\\t\\\\tat com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\\\\n\\\\t\\\\tat io.grpc.Context.run(Context.java:536)\\\\n\\\\t\\\\tat com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:78)\\\\n\\\\t\\\\tat com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\\\\n\\\\t\\\\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)\\\\n\\\\t\\\\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)\\\\n\\\\t\\\\t... 1 more\\\\n\\\";AppErrorCode=2;StartTimeMs=1729044292646;unknown;ResFormat=uncompressed;ServerTimeSec=0.006606848;LogBytes=256;Non-FailFast;EffSecLevel=none;ReqFormat=uncompressed;ReqID=e282aacfbcda1ddc;GlobalID=0;Server=[2002:a05:6845:9603:b0:3d:63a8:3b9e]:4001 debug=time: 2024-10-15T19:04:52.652-07:00 errorProto=code: \\\"NOT_FOUND\\\"\\nargument: \\\"Dataset\\\"\\nargument: \\\"lookerdata:cdc\\\"\\n\\n\\t\\tat com.google.cloud.helix.server.common.StubbyUtil.createHelixException(StubbyUtil.java:229)\\n\\t\\tat com.google.cloud.helix.server.metadata.lookup.RpcDatasetMetadataLookup.resolveDataset(RpcDatasetMetadataLookup.java:82)\\n\\t\\tat com.google.cloud.helix.server.auth.QueryEntityReferenceConverter.normalizeIfNeeded(QueryEntityReferenceConverter.java:201)\\n\\t\\t... 36 more\\n\\tCaused by: com.google.net.rpc3.client.RpcClientException: APPLICATION_ERROR;cloud.helix/Storage.GetDataset;domain: \\\"cloud.helix.ErrorDomain\\\" code: \\\"NOT_FOUND\\\" argument: \\\"Dataset\\\" argument: \\\"lookerdata:cdc\\\" debug_info: \\\"[NOT_FOUND] message=Dataset lookerdata:cdc not found debug=time: 2024-10-15T19:04:52.652-07:00 errorProto=code: \\\\\\\"NOT_FOUND\\\\\\\"\\\\nargument: \\\\\\\"Dataset\\\\\\\"\\\\nargument: \\\\\\\"lookerdata:cdc\\\\\\\"\\\\n\\\\n\\\\tat com.google.cloud.helix.common.Exceptions$Public.resourceNotFound(Exceptions.java:292)\\\\n\\\\tat com.google.cloud.helix.common.Exceptions$Public.resourceNotFound(Exceptions.java:296)\\\\n\\\\tat com.google.cloud.helix.server.metadata.DatasetTrackerSpanner.lambda$getDatasetEntityAsync$0(DatasetTrackerSpanner.java:705)\\\\n\\\\tat com.google.common.util.concurrent.AbstractTransformFuture$AsyncTransformFuture.doTransform(AbstractTransformFuture.java:230)\\\\n\\\\tat com.google.common.util.concurrent.AbstractTransformFuture$AsyncTransformFuture.doTransform(AbstractTransformFuture.java:217)\\\\n\\\\tat com.google.common.util.concurrent.AbstractTransformFuture.run(AbstractTransformFuture.java:130)\\\\n\\\\tat com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\\\\n\\\\tat io.grpc.Context.run(Context.java:536)\\\\n\\\\tat com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:78)\\\\n\\\\tat com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\\\\n\\\\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)\\\\n\\\\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)\\\\n\\\\tat com.google.cloud.helix.common.ThreadPoolFactory.lambda$defaultThreadFactory$2(ThreadPoolFactory.java:143)\\\\n\\\\tat java.base/java.lang.Thread.run(Unknown Source)\\\\n\\\\tSuppressed: java.lang.Exception: 
Including call stack from HelixFutures\\\\n\\\\t\\\\tat com.google.cloud.helix.common.HelixFutures.getHelixException(HelixFutures.java:76)\\\\n\\\\t\\\\tat com.google.cloud.helix.common.HelixFutures.get(HelixFutures.java:42)\\\\n\\\\t\\\\tat com.google.cloud.helix.server.metadata.DatasetTracker.lookupDataset(DatasetTracker.java:152)\\\\n\\\\t\\\\tat com.google.cloud.helix.server.rosy.GetDatasetImpl.getDatasetInternal(GetDatasetImpl.java:60)\\\\n\\\\t\\\\tat com.google.cloud.helix.server.rosy.GetDatasetImpl.getDataset(GetDatasetImpl.java:38)\\\\n\\\\t\\\\tat com.google.cloud.helix.server.rosy.StorageStubby.getDataset(StorageStubby.java:504)\\\\n\\\\t\\\\tat com.google.cloud.helix.proto2.Storage$ServiceParameters$81.handleBlockingRequest(Storage.java:4539)\\\\n\\\\t\\\\tat com.google.cloud.helix.proto2.Storage$ServiceParameters$81.handleBlockingRequest(Storage.java:4537)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcBlockingApplicationHandler.handleRequest(RpcBlockingApplicationHandler.java:28)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$RpcApplicationHandlerAdaptor.handleRequest(RpcServerInterceptor2Util.java:82)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.AggregatedRpcServerInterceptors.interceptRpc(AggregatedRpcServerInterceptors.java:100)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\\\n\\\\t\\\\tat com.google.prod.fireaxe.filters.FireaxeRpcServerInterceptorImpl.interceptRpc(FireaxeRpcServerInterceptorImpl.java:68)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\\\n\\\\t\\\\tat com.google.cloud.helix.common.isolation.IsolationServerInterceptor.checkIsolationThrottlingAndContinueHandling(IsolationServerInterceptor.java:162)\\\\n\\\\t\\\\tat com.google.cloud.helix.common.isolation.IsolationServerInterceptor.interceptRpc(IsolationServerInterceptor.java:113)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInterceptor2Util$InterceptedApplicationHandlerImpl.handleRequest(RpcServerInterceptor2Util.java:67)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplicationWithCancellation(RpcServerInternalContext.java:693)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.lambda$runRpcInApplication$0(RpcServerInternalContext.java:658)\\\\n\\\\t\\\\tat io.grpc.Context.run(Context.java:536)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerInternalContext.runRpcInApplication(RpcServerInternalContext.java:658)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerChannel$4.apply(RpcServerChannel.java:1102)\\\\n\\\\t\\\\tat com.google.net.rpc3.impl.server.RpcServerChannel$4.apply(RpcServerChannel.java:1060)\\\\n\\\\t\\\\tat com.google.common.util.concurrent.AbstractTransformFuture$TransformFuture.doTransform(AbstractTransformFuture.java:259)\\\\n\\\\t\\\\tat com.google.common.util.concurrent.AbstractTransformFuture$TransformFuture.doTransform(AbstractTransformFuture.java:249)\\\\n\\\\t\\\\tat com.google.common.util.concurrent.AbstractTransformFuture.run(AbstractTransformFuture.java:130)\\\\n\\\\t\\\\tat com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\\\\n\\\\t\\\\tat com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:80)\\\\n\\\\t\\\\tat 
com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\\\\n\\\\t\\\\tat com.google.common.context.ContextRunnable.runInContext(ContextRunnable.java:83)\\\\n\\\\t\\\\tat io.grpc.Context.run(Context.java:536)\\\\n\\\\t\\\\tat com.google.tracing.GenericContextCallback.runInInheritedContext(GenericContextCallback.java:78)\\\\n\\\\t\\\\tat com.google.common.context.ContextRunnable.run(ContextRunnable.java:74)\\\\n\\\\t\\\\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)\\\\n\\\\t\\\\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)\\\\n\\\\t\\\\t... 1 more\\\\n\\\";AppErrorCode=2;StartTimeMs=1729044292646;unknown;ResFormat=uncompressed;ServerTimeSec=0.006606848;LogBytes=256;Non-FailFast;EffSecLevel=none;ReqFormat=uncompressed;ReqID=e282aacfbcda1ddc;GlobalID=0;Server=[2002:a05:6845:9603:b0:3d:63a8:3b9e]:4001\\n\\t\\tat com.google.net.rpc3.client.RpcStub.startBlockingRpcInternal(RpcStub.java:571)\\n\\t\\tat com.google.net.rpc3.client.RpcStub.startBlockingRpc(RpcStub.java:471)\\n\\t\\tat com.google.cloud.helix.proto2.Storage$Stub.getDataset(Storage.java:1349)\\n\\t\\tat com.google.cloud.helix.server.metadata.lookup.RpcDatasetMetadataLookup.resolveDataset(RpcDatasetMetadataLookup.java:55)\\n\\t\\t... 37 more\\n\\tSuppressed: [ACCESS_DENIED] debug=User 768386550392: IAM access denied errorProto=code: \\\"ACCESS_DENIED\\\"\\nargument: \\\"Dataset\\\"\\nargument: \\\"lookerdata:cdc\\\"\\nargument: \\\"Permission bigquery.datasets.get denied on dataset lookerdata:cdc (or it may not exist).\\\"\\n\\n\\t\\tat com.google.cloud.helix.common.Exceptions$Public.accessDenied(Exceptions.java:574)\\n\\t\\tat com.google.cloud.helix.common.auth.AuthorizerExceptions$Public.createPermissionDeny(AuthorizerExceptions.java:262)\\n\\t\\tat com.google.cloud.helix.common.auth.AuthorizerExceptions$Public.datasetAccessDenied(AuthorizerExceptions.java:156)\\n\\t\\tat com.google.cloud.helix.server.auth.IamAuthorizer.lambda$authorizeDatasetInternal$4(IamAuthorizer.java:1194)\\n\\t\\tat com.google.common.util.concurrent.AbstractTransformFuture$TransformFuture.doTransform(AbstractTransformFuture.java:259)\\n\\t\\tat com.google.common.util.concurrent.AbstractTransformFuture$TransformFuture.doTransform(AbstractTransformFuture.java:249)\\n\\t\\tat com.google.common.util.concurrent.AbstractTransformFuture.run(AbstractTransformFuture.java:130)\\n\\t\\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Unknown Source)\\n\\t\\tat com.google.common.util.concurrent.TrustedListenableFutureTask$TrustedFutureInterruptibleTask.runInterruptibly(TrustedListenableFutureTask.java:135)\\n\\t\\tat com.google.common.util.concurrent.InterruptibleTask.run(InterruptibleTask.java:77)\\n\\t\\tat com.google.common.util.concurrent.TrustedListenableFutureTask.run(TrustedListenableFutureTask.java:86)\\n\\t\\t... 
7 more\\n\";AppErrorCode=4;StartTimeMs=1729044292643;unknown;Deadline(sec)=60.0;ResFormat=uncompressed;ServerTimeSec=0.174171421;LogBytes=256;FailFast;EffSecLevel=privacy_and_integrity;ReqFormat=uncompressed;ReqID=1d5a3c9d34b06964;GlobalID=0;Server=[2002:a05:6845:6018:b0:3d:366e:f847]:4001\n\tat com.google.net.rpc3.client.RpcStub.startBlockingRpcInternal(RpcStub.java:571)\n\tat com.google.net.rpc3.client.RpcStub.startBlockingRpc(RpcStub.java:471)\n\tat com.google.cloud.helix.proto2.Authorization$Stub.authorizeQuery(Authorization.java:198)\n\tat com.google.cloud.helix.common.auth.client.AuthorizationServiceClientImpl.authorizeQuery(AuthorizationServiceClientImpl.java:332)\n\t... 16 more\n", + "message": "Access Denied: Table lookerdata:cdc.project_tycho_reports: User does not have permission to query table lookerdata:cdc.project_tycho_reports, or perhaps it does not exist." + }, + "errors": [ + { + "reason": "accessDenied", + "message": "Access Denied: Table lookerdata:cdc.project_tycho_reports: User does not have permission to query table lookerdata:cdc.project_tycho_reports, or perhaps it does not exist." + } + ], + "state": "DONE" + }, + "principal_subject": "user:xiaoweim@google.com", + "jobCreationReason": { + "code": "REQUESTED" + } } - ], - "creationTime": "1714007921933", - "lastModifiedTime": "1714007921933", - "location": "US", - "type": "DEFAULT" - } - headers: - Content-Type: - - application/json; charset=UTF-8 - status: 200 OK - code: 200 - duration: 249.096259ms - - id: 25 - request: - proto: HTTP/1.1 - proto_major: 1 - proto_minor: 1 - content_length: 0 - transfer_encoding: [] - trailer: {} - host: bigquery.googleapis.com - remote_addr: "" - request_uri: "" - body: "" - form: {} - headers: - Content-Type: - - application/json - url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset22yq2ldf3wcoir?alt=json&deleteContents=false - method: DELETE - response: - proto: HTTP/2.0 - proto_major: 2 - proto_minor: 0 - transfer_encoding: [] - trailer: {} - content_length: 0 - uncompressed: false - body: "" - headers: - Content-Length: - - "0" - Content-Type: - - application/json; charset=UTF-8 - status: 204 No Content - code: 204 - duration: 219.625286ms \ No newline at end of file + headers: + Content-Type: + - application/json; charset=UTF-8 + status: 200 OK + code: 200 + duration: 277.227604ms + - id: 18 + request: + proto: HTTP/1.1 + proto_major: 1 + proto_minor: 1 + content_length: 0 + transfer_encoding: [] + trailer: {} + host: bigquery.googleapis.com + remote_addr: "" + request_uri: "" + body: "" + form: {} + headers: + X-Goog-Api-Client: + - gl-go/1.23.0--20240626-RC01 cl/646990413 +5a18e79687 X:fieldtrack,boringcrypto gdcl/0.196.0 + url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset22yq2ldf3wcoir/tables/bigquerytable2yq2ldf3wcoir?alt=json&prettyPrint=false + method: DELETE + response: + proto: HTTP/2.0 + proto_major: 2 + proto_minor: 0 + transfer_encoding: [] + trailer: {} + content_length: 0 + uncompressed: false + body: "" + headers: + Content-Length: + - "0" + Content-Type: + - application/json; charset=UTF-8 + status: 204 No Content + code: 204 + duration: 172.077964ms + - id: 19 + request: + proto: HTTP/1.1 + proto_major: 1 + proto_minor: 1 + content_length: 0 + transfer_encoding: [] + trailer: {} + host: bigquery.googleapis.com + remote_addr: "" + request_uri: "" + body: "" + form: {} + headers: + Content-Type: + - application/json + url: 
https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset12yq2ldf3wcoir?alt=json&deleteContents=false + method: DELETE + response: + proto: HTTP/2.0 + proto_major: 2 + proto_minor: 0 + transfer_encoding: [] + trailer: {} + content_length: 0 + uncompressed: false + body: "" + headers: + Content-Length: + - "0" + Content-Type: + - application/json; charset=UTF-8 + status: 204 No Content + code: 204 + duration: 126.402208ms + - id: 20 + request: + proto: HTTP/1.1 + proto_major: 1 + proto_minor: 1 + content_length: 0 + transfer_encoding: [] + trailer: {} + host: bigquery.googleapis.com + remote_addr: "" + request_uri: "" + body: "" + form: {} + headers: + Content-Type: + - application/json + url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydataset22yq2ldf3wcoir?alt=json&deleteContents=false + method: DELETE + response: + proto: HTTP/2.0 + proto_major: 2 + proto_minor: 0 + transfer_encoding: [] + trailer: {} + content_length: 0 + uncompressed: false + body: "" + headers: + Content-Length: + - "0" + Content-Type: + - application/json; charset=UTF-8 + status: 204 No Content + code: 204 + duration: 164.101024ms diff --git a/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigqueryjob/create.yaml b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigqueryjob/create.yaml index 932807a87f..f3d0ce6d36 100644 --- a/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigqueryjob/create.yaml +++ b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigqueryjob/create.yaml @@ -19,7 +19,7 @@ metadata: label-one: "value-one" name: bigqueryjob-${uniqueId} spec: - location: "US" + location: "us-central1" jobTimeoutMs: "600000" query: query: "SELECT state FROM [lookerdata:cdc.project_tycho_reports]" diff --git a/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigqueryjob/dependencies.yaml b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigqueryjob/dependencies.yaml index 8c5848bc33..072a5fee49 100644 --- a/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigqueryjob/dependencies.yaml +++ b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigqueryjob/dependencies.yaml @@ -16,11 +16,15 @@ apiVersion: bigquery.cnrm.cloud.google.com/v1beta1 kind: BigQueryDataset metadata: name: bigquerydataset1${uniqueId} +spec: + location: us-central1 --- apiVersion: bigquery.cnrm.cloud.google.com/v1beta1 kind: BigQueryDataset metadata: name: bigquerydataset2${uniqueId} +spec: + location: us-central1 --- apiVersion: bigquery.cnrm.cloud.google.com/v1beta1 kind: BigQueryTable diff --git a/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerytable/_http.log b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerytable/_http.log index 4263316151..abb5eb32c8 100644 --- a/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerytable/_http.log +++ b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerytable/_http.log @@ -42,7 +42,7 @@ User-Agent: Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 t "cnrm-test": "true", "managed-by-cnrm": "true" }, - "location": "US" + "location": "us-central1" } 200 OK @@ -88,7 +88,7 @@ X-Xss-Protection: 0 "managed-by-cnrm": "true" }, "lastModifiedTime": "123456789", - "location": "US", + "location": "us-central1", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydatasetsample${uniqueId}", "type": "DEFAULT" } @@ -142,7 +142,7 @@ X-Xss-Protection: 0 
"managed-by-cnrm": "true" }, "lastModifiedTime": "123456789", - "location": "US", + "location": "us-central1", "maxTimeTravelHours": "168", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydatasetsample${uniqueId}", "type": "DEFAULT" @@ -881,7 +881,7 @@ X-Xss-Protection: 0 "managed-by-cnrm": "true" }, "lastModifiedTime": "123456789", - "location": "US", + "location": "us-central1", "maxTimeTravelHours": "168", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydatasetsample${uniqueId}", "type": "DEFAULT" diff --git a/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerytable/_vcr_cassettes/tf.yaml b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerytable/_vcr_cassettes/tf.yaml index 8cf0aea6c7..3bef110462 100644 --- a/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerytable/_vcr_cassettes/tf.yaml +++ b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerytable/_vcr_cassettes/tf.yaml @@ -47,20 +47,20 @@ interactions: - application/json; charset=UTF-8 status: 404 Not Found code: 404 - duration: 615.256925ms + duration: 249.604457ms - id: 1 request: proto: HTTP/1.1 proto_major: 1 proto_minor: 1 - content_length: 142 + content_length: 151 transfer_encoding: [] trailer: {} host: bigquery.googleapis.com remote_addr: "" request_uri: "" body: | - {"datasetReference":{"datasetId":"bigquerydatasetsamplel7b81f5rgmgk"},"labels":{"cnrm-test":"true","managed-by-cnrm":"true"},"location":"US"} + {"datasetReference":{"datasetId":"bigquerydatasetsamplel7b81f5rgmgk"},"labels":{"cnrm-test":"true","managed-by-cnrm":"true"},"location":"us-central1"} form: {} headers: Content-Type: @@ -78,7 +78,7 @@ interactions: body: | { "kind": "bigquery#dataset", - "etag": "pYo/zUITYaROvtieJn/Efg==", + "etag": "uJ2Ltq8bm7J0FUJdKMsN4Q==", "id": "example-project:bigquerydatasetsamplel7b81f5rgmgk", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydatasetsamplel7b81f5rgmgk", "datasetReference": { @@ -100,16 +100,16 @@ interactions: }, { "role": "OWNER", - "userByEmail": "andylu@pisces.joonix.net" + "userByEmail": "xiaoweim@google.com" }, { "role": "READER", "specialGroup": "projectReaders" } ], - "creationTime": "1720515934342", - "lastModifiedTime": "1720515934342", - "location": "US", + "creationTime": "1729233685041", + "lastModifiedTime": "1729233685041", + "location": "us-central1", "type": "DEFAULT" } headers: @@ -117,7 +117,7 @@ interactions: - application/json; charset=UTF-8 status: 200 OK code: 200 - duration: 705.821848ms + duration: 455.813917ms - id: 2 request: proto: HTTP/1.1 @@ -147,7 +147,7 @@ interactions: body: | { "kind": "bigquery#dataset", - "etag": "pYo/zUITYaROvtieJn/Efg==", + "etag": "uJ2Ltq8bm7J0FUJdKMsN4Q==", "id": "example-project:bigquerydatasetsamplel7b81f5rgmgk", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydatasetsamplel7b81f5rgmgk", "datasetReference": { @@ -169,24 +169,25 @@ interactions: }, { "role": "OWNER", - "userByEmail": "andylu@pisces.joonix.net" + "userByEmail": "xiaoweim@google.com" }, { "role": "READER", "specialGroup": "projectReaders" } ], - "creationTime": "1720515934342", - "lastModifiedTime": "1720515934342", - "location": "US", - "type": "DEFAULT" + "creationTime": "1729233685041", + "lastModifiedTime": "1729233685041", + "location": "us-central1", + "type": "DEFAULT", + "maxTimeTravelHours": "168" } headers: Content-Type: - 
application/json; charset=UTF-8 status: 200 OK code: 200 - duration: 444.121271ms + duration: 155.125015ms - id: 3 request: proto: HTTP/1.1 @@ -202,7 +203,7 @@ interactions: form: {} headers: X-Goog-Api-Client: - - gl-go/1.22.5 gdcl/0.187.0 + - gl-go/1.23.0--20240626-RC01 cl/646990413 +5a18e79687 X:fieldtrack,boringcrypto gdcl/0.196.0 url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydatasetsamplel7b81f5rgmgk/tables/bigquerytablesamplel7b81f5rgmgk?alt=json&prettyPrint=false method: GET response: @@ -219,7 +220,7 @@ interactions: - application/json; charset=UTF-8 status: 404 Not Found code: 404 - duration: 212.578131ms + duration: 124.971748ms - id: 4 request: proto: HTTP/1.1 @@ -249,7 +250,7 @@ interactions: body: | { "kind": "bigquery#dataset", - "etag": "pYo/zUITYaROvtieJn/Efg==", + "etag": "uJ2Ltq8bm7J0FUJdKMsN4Q==", "id": "example-project:bigquerydatasetsamplel7b81f5rgmgk", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydatasetsamplel7b81f5rgmgk", "datasetReference": { @@ -271,30 +272,31 @@ interactions: }, { "role": "OWNER", - "userByEmail": "andylu@pisces.joonix.net" + "userByEmail": "xiaoweim@google.com" }, { "role": "READER", "specialGroup": "projectReaders" } ], - "creationTime": "1720515934342", - "lastModifiedTime": "1720515934342", - "location": "US", - "type": "DEFAULT" + "creationTime": "1729233685041", + "lastModifiedTime": "1729233685041", + "location": "us-central1", + "type": "DEFAULT", + "maxTimeTravelHours": "168" } headers: Content-Type: - application/json; charset=UTF-8 status: 200 OK code: 200 - duration: 419.825033ms + duration: 224.821628ms - id: 5 request: proto: HTTP/1.1 proto_major: 1 proto_minor: 1 - content_length: 507 + content_length: 509 transfer_encoding: [] trailer: {} host: bigquery.googleapis.com @@ -307,7 +309,7 @@ interactions: Content-Type: - application/json X-Goog-Api-Client: - - gl-go/1.22.5 gdcl/0.187.0 + - gl-go/1.23.0--20240626-RC01 cl/646990413 +5a18e79687 X:fieldtrack,boringcrypto gdcl/0.196.0 url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydatasetsamplel7b81f5rgmgk/tables?alt=json&prettyPrint=false method: POST response: @@ -318,13 +320,13 @@ interactions: trailer: {} content_length: -1 uncompressed: true - body: 
'{"kind":"bigquery#table","etag":"ZYaCcTJ8PF5Tok2MBL096Q==","id":"example-project:bigquerydatasetsamplel7b81f5rgmgk.bigquerytablesamplel7b81f5rgmgk","selfLink":"https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydatasetsamplel7b81f5rgmgk/tables/bigquerytablesamplel7b81f5rgmgk","tableReference":{"projectId":"example-project","datasetId":"bigquerydatasetsamplel7b81f5rgmgk","tableId":"bigquerytablesamplel7b81f5rgmgk"},"friendlyName":"bigquerytable-sample","labels":{"cnrm-test":"true","managed-by-cnrm":"true"},"schema":{"fields":[{"name":"string_field_0","type":"STRING","mode":"NULLABLE"},{"name":"string_field_1","type":"STRING","mode":"NULLABLE"},{"name":"string_field_2","type":"STRING","mode":"NULLABLE"},{"name":"string_field_3","type":"STRING","mode":"NULLABLE"},{"name":"string_field_4","type":"STRING","mode":"NULLABLE"},{"name":"string_field_5","type":"STRING","mode":"NULLABLE"},{"name":"int64_field_6","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_7","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_8","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_9","type":"INTEGER","mode":"NULLABLE"},{"name":"string_field_10","type":"STRING","mode":"NULLABLE"},{"name":"int64_field_11","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_12","type":"INTEGER","mode":"NULLABLE"},{"name":"string_field_13","type":"STRING","mode":"NULLABLE"}]},"numBytes":"0","numLongTermBytes":"0","numRows":"0","creationTime":"1720515936786","lastModifiedTime":"1720515937237","type":"EXTERNAL","externalDataConfiguration":{"sourceUris":["gs://gcp-public-data-landsat/LC08/01/044/034/LC08_L1GT_044034_20130330_20170310_01_T2/LC08_L1GT_044034_20130330_20170310_01_T2_ANG.txt"],"sourceFormat":"CSV","autodetect":true,"compression":"NONE"},"location":"US","requirePartitionFilter":true,"numTotalLogicalBytes":"0","numActiveLogicalBytes":"0","numLongTermLogicalBytes":"0"}' + body: 
'{"kind":"bigquery#table","etag":"KksuznIrRtc4MrfLoiTYlQ==","id":"example-project:bigquerydatasetsamplel7b81f5rgmgk.bigquerytablesamplel7b81f5rgmgk","selfLink":"https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydatasetsamplel7b81f5rgmgk/tables/bigquerytablesamplel7b81f5rgmgk","tableReference":{"projectId":"example-project","datasetId":"bigquerydatasetsamplel7b81f5rgmgk","tableId":"bigquerytablesamplel7b81f5rgmgk"},"friendlyName":"bigquerytable-sample","labels":{"cnrm-test":"true","managed-by-cnrm":"true"},"schema":{"fields":[{"name":"string_field_0","type":"STRING","mode":"NULLABLE"},{"name":"string_field_1","type":"STRING","mode":"NULLABLE"},{"name":"string_field_2","type":"STRING","mode":"NULLABLE"},{"name":"string_field_3","type":"STRING","mode":"NULLABLE"},{"name":"string_field_4","type":"STRING","mode":"NULLABLE"},{"name":"string_field_5","type":"STRING","mode":"NULLABLE"},{"name":"int64_field_6","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_7","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_8","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_9","type":"INTEGER","mode":"NULLABLE"},{"name":"string_field_10","type":"STRING","mode":"NULLABLE"},{"name":"int64_field_11","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_12","type":"INTEGER","mode":"NULLABLE"},{"name":"string_field_13","type":"STRING","mode":"NULLABLE"}]},"numBytes":"0","numLongTermBytes":"0","numRows":"0","creationTime":"1729233686287","lastModifiedTime":"1729233686562","type":"EXTERNAL","externalDataConfiguration":{"sourceUris":["gs://gcp-public-data-landsat/LC08/01/044/034/LC08_L1GT_044034_20130330_20170310_01_T2/LC08_L1GT_044034_20130330_20170310_01_T2_ANG.txt"],"sourceFormat":"CSV","autodetect":true,"compression":"NONE"},"location":"us-central1","requirePartitionFilter":true,"numTotalLogicalBytes":"0","numActiveLogicalBytes":"0","numLongTermLogicalBytes":"0"}' headers: Content-Type: - application/json; charset=UTF-8 status: 200 OK code: 200 - duration: 689.112379ms + duration: 437.450628ms - id: 6 request: proto: HTTP/1.1 @@ -340,7 +342,7 @@ interactions: form: {} headers: X-Goog-Api-Client: - - gl-go/1.22.5 gdcl/0.187.0 + - gl-go/1.23.0--20240626-RC01 cl/646990413 +5a18e79687 X:fieldtrack,boringcrypto gdcl/0.196.0 url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydatasetsamplel7b81f5rgmgk/tables/bigquerytablesamplel7b81f5rgmgk?alt=json&prettyPrint=false method: GET response: @@ -351,13 +353,13 @@ interactions: trailer: {} content_length: -1 uncompressed: true - body: 
'{"kind":"bigquery#table","etag":"ZYaCcTJ8PF5Tok2MBL096Q==","id":"example-project:bigquerydatasetsamplel7b81f5rgmgk.bigquerytablesamplel7b81f5rgmgk","selfLink":"https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydatasetsamplel7b81f5rgmgk/tables/bigquerytablesamplel7b81f5rgmgk","tableReference":{"projectId":"example-project","datasetId":"bigquerydatasetsamplel7b81f5rgmgk","tableId":"bigquerytablesamplel7b81f5rgmgk"},"friendlyName":"bigquerytable-sample","labels":{"cnrm-test":"true","managed-by-cnrm":"true"},"schema":{"fields":[{"name":"string_field_0","type":"STRING","mode":"NULLABLE"},{"name":"string_field_1","type":"STRING","mode":"NULLABLE"},{"name":"string_field_2","type":"STRING","mode":"NULLABLE"},{"name":"string_field_3","type":"STRING","mode":"NULLABLE"},{"name":"string_field_4","type":"STRING","mode":"NULLABLE"},{"name":"string_field_5","type":"STRING","mode":"NULLABLE"},{"name":"int64_field_6","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_7","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_8","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_9","type":"INTEGER","mode":"NULLABLE"},{"name":"string_field_10","type":"STRING","mode":"NULLABLE"},{"name":"int64_field_11","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_12","type":"INTEGER","mode":"NULLABLE"},{"name":"string_field_13","type":"STRING","mode":"NULLABLE"}]},"numBytes":"0","numLongTermBytes":"0","numRows":"0","creationTime":"1720515936786","lastModifiedTime":"1720515937237","type":"EXTERNAL","externalDataConfiguration":{"sourceUris":["gs://gcp-public-data-landsat/LC08/01/044/034/LC08_L1GT_044034_20130330_20170310_01_T2/LC08_L1GT_044034_20130330_20170310_01_T2_ANG.txt"],"sourceFormat":"CSV","autodetect":true,"compression":"NONE"},"location":"US","requirePartitionFilter":true,"numTotalLogicalBytes":"0","numActiveLogicalBytes":"0","numLongTermLogicalBytes":"0"}' + body: 
'{"kind":"bigquery#table","etag":"KksuznIrRtc4MrfLoiTYlQ==","id":"example-project:bigquerydatasetsamplel7b81f5rgmgk.bigquerytablesamplel7b81f5rgmgk","selfLink":"https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydatasetsamplel7b81f5rgmgk/tables/bigquerytablesamplel7b81f5rgmgk","tableReference":{"projectId":"example-project","datasetId":"bigquerydatasetsamplel7b81f5rgmgk","tableId":"bigquerytablesamplel7b81f5rgmgk"},"friendlyName":"bigquerytable-sample","labels":{"cnrm-test":"true","managed-by-cnrm":"true"},"schema":{"fields":[{"name":"string_field_0","type":"STRING","mode":"NULLABLE"},{"name":"string_field_1","type":"STRING","mode":"NULLABLE"},{"name":"string_field_2","type":"STRING","mode":"NULLABLE"},{"name":"string_field_3","type":"STRING","mode":"NULLABLE"},{"name":"string_field_4","type":"STRING","mode":"NULLABLE"},{"name":"string_field_5","type":"STRING","mode":"NULLABLE"},{"name":"int64_field_6","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_7","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_8","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_9","type":"INTEGER","mode":"NULLABLE"},{"name":"string_field_10","type":"STRING","mode":"NULLABLE"},{"name":"int64_field_11","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_12","type":"INTEGER","mode":"NULLABLE"},{"name":"string_field_13","type":"STRING","mode":"NULLABLE"}]},"numBytes":"0","numLongTermBytes":"0","numRows":"0","creationTime":"1729233686287","lastModifiedTime":"1729233686562","type":"EXTERNAL","externalDataConfiguration":{"sourceUris":["gs://gcp-public-data-landsat/LC08/01/044/034/LC08_L1GT_044034_20130330_20170310_01_T2/LC08_L1GT_044034_20130330_20170310_01_T2_ANG.txt"],"sourceFormat":"CSV","autodetect":true,"compression":"NONE"},"location":"us-central1","requirePartitionFilter":true,"numTotalLogicalBytes":"0","numActiveLogicalBytes":"0","numLongTermLogicalBytes":"0"}' headers: Content-Type: - application/json; charset=UTF-8 status: 200 OK code: 200 - duration: 326.434921ms + duration: 221.730449ms - id: 7 request: proto: HTTP/1.1 @@ -373,7 +375,7 @@ interactions: form: {} headers: X-Goog-Api-Client: - - gl-go/1.22.5 gdcl/0.187.0 + - gl-go/1.23.0--20240626-RC01 cl/646990413 +5a18e79687 X:fieldtrack,boringcrypto gdcl/0.196.0 url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydatasetsamplel7b81f5rgmgk/tables/bigquerytablesamplel7b81f5rgmgk?alt=json&prettyPrint=false method: GET response: @@ -384,13 +386,13 @@ interactions: trailer: {} content_length: -1 uncompressed: true - body: 
'{"kind":"bigquery#table","etag":"ZYaCcTJ8PF5Tok2MBL096Q==","id":"example-project:bigquerydatasetsamplel7b81f5rgmgk.bigquerytablesamplel7b81f5rgmgk","selfLink":"https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydatasetsamplel7b81f5rgmgk/tables/bigquerytablesamplel7b81f5rgmgk","tableReference":{"projectId":"example-project","datasetId":"bigquerydatasetsamplel7b81f5rgmgk","tableId":"bigquerytablesamplel7b81f5rgmgk"},"friendlyName":"bigquerytable-sample","labels":{"cnrm-test":"true","managed-by-cnrm":"true"},"schema":{"fields":[{"name":"string_field_0","type":"STRING","mode":"NULLABLE"},{"name":"string_field_1","type":"STRING","mode":"NULLABLE"},{"name":"string_field_2","type":"STRING","mode":"NULLABLE"},{"name":"string_field_3","type":"STRING","mode":"NULLABLE"},{"name":"string_field_4","type":"STRING","mode":"NULLABLE"},{"name":"string_field_5","type":"STRING","mode":"NULLABLE"},{"name":"int64_field_6","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_7","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_8","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_9","type":"INTEGER","mode":"NULLABLE"},{"name":"string_field_10","type":"STRING","mode":"NULLABLE"},{"name":"int64_field_11","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_12","type":"INTEGER","mode":"NULLABLE"},{"name":"string_field_13","type":"STRING","mode":"NULLABLE"}]},"numBytes":"0","numLongTermBytes":"0","numRows":"0","creationTime":"1720515936786","lastModifiedTime":"1720515937237","type":"EXTERNAL","externalDataConfiguration":{"sourceUris":["gs://gcp-public-data-landsat/LC08/01/044/034/LC08_L1GT_044034_20130330_20170310_01_T2/LC08_L1GT_044034_20130330_20170310_01_T2_ANG.txt"],"sourceFormat":"CSV","autodetect":true,"compression":"NONE"},"location":"US","requirePartitionFilter":true,"numTotalLogicalBytes":"0","numActiveLogicalBytes":"0","numLongTermLogicalBytes":"0"}' + body: 
'{"kind":"bigquery#table","etag":"KksuznIrRtc4MrfLoiTYlQ==","id":"example-project:bigquerydatasetsamplel7b81f5rgmgk.bigquerytablesamplel7b81f5rgmgk","selfLink":"https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydatasetsamplel7b81f5rgmgk/tables/bigquerytablesamplel7b81f5rgmgk","tableReference":{"projectId":"example-project","datasetId":"bigquerydatasetsamplel7b81f5rgmgk","tableId":"bigquerytablesamplel7b81f5rgmgk"},"friendlyName":"bigquerytable-sample","labels":{"cnrm-test":"true","managed-by-cnrm":"true"},"schema":{"fields":[{"name":"string_field_0","type":"STRING","mode":"NULLABLE"},{"name":"string_field_1","type":"STRING","mode":"NULLABLE"},{"name":"string_field_2","type":"STRING","mode":"NULLABLE"},{"name":"string_field_3","type":"STRING","mode":"NULLABLE"},{"name":"string_field_4","type":"STRING","mode":"NULLABLE"},{"name":"string_field_5","type":"STRING","mode":"NULLABLE"},{"name":"int64_field_6","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_7","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_8","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_9","type":"INTEGER","mode":"NULLABLE"},{"name":"string_field_10","type":"STRING","mode":"NULLABLE"},{"name":"int64_field_11","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_12","type":"INTEGER","mode":"NULLABLE"},{"name":"string_field_13","type":"STRING","mode":"NULLABLE"}]},"numBytes":"0","numLongTermBytes":"0","numRows":"0","creationTime":"1729233686287","lastModifiedTime":"1729233686562","type":"EXTERNAL","externalDataConfiguration":{"sourceUris":["gs://gcp-public-data-landsat/LC08/01/044/034/LC08_L1GT_044034_20130330_20170310_01_T2/LC08_L1GT_044034_20130330_20170310_01_T2_ANG.txt"],"sourceFormat":"CSV","autodetect":true,"compression":"NONE"},"location":"us-central1","requirePartitionFilter":true,"numTotalLogicalBytes":"0","numActiveLogicalBytes":"0","numLongTermLogicalBytes":"0"}' headers: Content-Type: - application/json; charset=UTF-8 status: 200 OK code: 200 - duration: 441.704191ms + duration: 227.818618ms - id: 8 request: proto: HTTP/1.1 @@ -406,7 +408,7 @@ interactions: form: {} headers: X-Goog-Api-Client: - - gl-go/1.22.5 gdcl/0.187.0 + - gl-go/1.23.0--20240626-RC01 cl/646990413 +5a18e79687 X:fieldtrack,boringcrypto gdcl/0.196.0 url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydatasetsamplel7b81f5rgmgk/tables/bigquerytablesamplel7b81f5rgmgk?alt=json&prettyPrint=false method: GET response: @@ -417,19 +419,19 @@ interactions: trailer: {} content_length: -1 uncompressed: true - body: 
'{"kind":"bigquery#table","etag":"ZYaCcTJ8PF5Tok2MBL096Q==","id":"example-project:bigquerydatasetsamplel7b81f5rgmgk.bigquerytablesamplel7b81f5rgmgk","selfLink":"https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydatasetsamplel7b81f5rgmgk/tables/bigquerytablesamplel7b81f5rgmgk","tableReference":{"projectId":"example-project","datasetId":"bigquerydatasetsamplel7b81f5rgmgk","tableId":"bigquerytablesamplel7b81f5rgmgk"},"friendlyName":"bigquerytable-sample","labels":{"cnrm-test":"true","managed-by-cnrm":"true"},"schema":{"fields":[{"name":"string_field_0","type":"STRING","mode":"NULLABLE"},{"name":"string_field_1","type":"STRING","mode":"NULLABLE"},{"name":"string_field_2","type":"STRING","mode":"NULLABLE"},{"name":"string_field_3","type":"STRING","mode":"NULLABLE"},{"name":"string_field_4","type":"STRING","mode":"NULLABLE"},{"name":"string_field_5","type":"STRING","mode":"NULLABLE"},{"name":"int64_field_6","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_7","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_8","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_9","type":"INTEGER","mode":"NULLABLE"},{"name":"string_field_10","type":"STRING","mode":"NULLABLE"},{"name":"int64_field_11","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_12","type":"INTEGER","mode":"NULLABLE"},{"name":"string_field_13","type":"STRING","mode":"NULLABLE"}]},"numBytes":"0","numLongTermBytes":"0","numRows":"0","creationTime":"1720515936786","lastModifiedTime":"1720515937237","type":"EXTERNAL","externalDataConfiguration":{"sourceUris":["gs://gcp-public-data-landsat/LC08/01/044/034/LC08_L1GT_044034_20130330_20170310_01_T2/LC08_L1GT_044034_20130330_20170310_01_T2_ANG.txt"],"sourceFormat":"CSV","autodetect":true,"compression":"NONE"},"location":"US","requirePartitionFilter":true,"numTotalLogicalBytes":"0","numActiveLogicalBytes":"0","numLongTermLogicalBytes":"0"}' + body: 
'{"kind":"bigquery#table","etag":"KksuznIrRtc4MrfLoiTYlQ==","id":"example-project:bigquerydatasetsamplel7b81f5rgmgk.bigquerytablesamplel7b81f5rgmgk","selfLink":"https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydatasetsamplel7b81f5rgmgk/tables/bigquerytablesamplel7b81f5rgmgk","tableReference":{"projectId":"example-project","datasetId":"bigquerydatasetsamplel7b81f5rgmgk","tableId":"bigquerytablesamplel7b81f5rgmgk"},"friendlyName":"bigquerytable-sample","labels":{"cnrm-test":"true","managed-by-cnrm":"true"},"schema":{"fields":[{"name":"string_field_0","type":"STRING","mode":"NULLABLE"},{"name":"string_field_1","type":"STRING","mode":"NULLABLE"},{"name":"string_field_2","type":"STRING","mode":"NULLABLE"},{"name":"string_field_3","type":"STRING","mode":"NULLABLE"},{"name":"string_field_4","type":"STRING","mode":"NULLABLE"},{"name":"string_field_5","type":"STRING","mode":"NULLABLE"},{"name":"int64_field_6","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_7","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_8","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_9","type":"INTEGER","mode":"NULLABLE"},{"name":"string_field_10","type":"STRING","mode":"NULLABLE"},{"name":"int64_field_11","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_12","type":"INTEGER","mode":"NULLABLE"},{"name":"string_field_13","type":"STRING","mode":"NULLABLE"}]},"numBytes":"0","numLongTermBytes":"0","numRows":"0","creationTime":"1729233686287","lastModifiedTime":"1729233686562","type":"EXTERNAL","externalDataConfiguration":{"sourceUris":["gs://gcp-public-data-landsat/LC08/01/044/034/LC08_L1GT_044034_20130330_20170310_01_T2/LC08_L1GT_044034_20130330_20170310_01_T2_ANG.txt"],"sourceFormat":"CSV","autodetect":true,"compression":"NONE"},"location":"us-central1","requirePartitionFilter":true,"numTotalLogicalBytes":"0","numActiveLogicalBytes":"0","numLongTermLogicalBytes":"0"}' headers: Content-Type: - application/json; charset=UTF-8 status: 200 OK code: 200 - duration: 448.04455ms + duration: 147.233476ms - id: 9 request: proto: HTTP/1.1 proto_major: 1 proto_minor: 1 - content_length: 1575 + content_length: 1577 transfer_encoding: [] trailer: {} host: bigquery.googleapis.com @@ -442,7 +444,7 @@ interactions: Content-Type: - application/json X-Goog-Api-Client: - - gl-go/1.22.5 gdcl/0.187.0 + - gl-go/1.23.0--20240626-RC01 cl/646990413 +5a18e79687 X:fieldtrack,boringcrypto gdcl/0.196.0 url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydatasetsamplel7b81f5rgmgk/tables/bigquerytablesamplel7b81f5rgmgk?alt=json&prettyPrint=false method: PUT response: @@ -453,13 +455,13 @@ interactions: trailer: {} content_length: -1 uncompressed: true - body: 
'{"kind":"bigquery#table","etag":"BMni8WOOCXFgFa5BC/jfkg==","id":"example-project:bigquerydatasetsamplel7b81f5rgmgk.bigquerytablesamplel7b81f5rgmgk","selfLink":"https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydatasetsamplel7b81f5rgmgk/tables/bigquerytablesamplel7b81f5rgmgk","tableReference":{"projectId":"example-project","datasetId":"bigquerydatasetsamplel7b81f5rgmgk","tableId":"bigquerytablesamplel7b81f5rgmgk"},"friendlyName":"bigquerytable-sample-updated","labels":{"cnrm-test":"true","managed-by-cnrm":"true"},"schema":{"fields":[{"name":"string_field_0","type":"STRING","mode":"NULLABLE"},{"name":"string_field_1","type":"STRING","mode":"NULLABLE"},{"name":"string_field_2","type":"STRING","mode":"NULLABLE"},{"name":"string_field_3","type":"STRING","mode":"NULLABLE"},{"name":"string_field_4","type":"STRING","mode":"NULLABLE"},{"name":"string_field_5","type":"STRING","mode":"NULLABLE"},{"name":"int64_field_6","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_7","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_8","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_9","type":"INTEGER","mode":"NULLABLE"},{"name":"string_field_10","type":"STRING","mode":"NULLABLE"},{"name":"int64_field_11","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_12","type":"INTEGER","mode":"NULLABLE"},{"name":"string_field_13","type":"STRING","mode":"NULLABLE"}]},"numBytes":"0","numLongTermBytes":"0","numRows":"0","creationTime":"1720515936786","lastModifiedTime":"1720515940935","type":"EXTERNAL","externalDataConfiguration":{"sourceUris":["gs://gcp-public-data-landsat/LC08/01/044/034/LC08_L1GT_044034_20130330_20170310_01_T2/LC08_L1GT_044034_20130330_20170310_01_T2_ANG.txt"],"sourceFormat":"CSV","autodetect":true,"compression":"NONE"},"location":"US","requirePartitionFilter":false,"numTotalLogicalBytes":"0","numActiveLogicalBytes":"0","numLongTermLogicalBytes":"0"}' + body: 
'{"kind":"bigquery#table","etag":"rCwyhaRq2nEw0eAFProieQ==","id":"example-project:bigquerydatasetsamplel7b81f5rgmgk.bigquerytablesamplel7b81f5rgmgk","selfLink":"https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydatasetsamplel7b81f5rgmgk/tables/bigquerytablesamplel7b81f5rgmgk","tableReference":{"projectId":"example-project","datasetId":"bigquerydatasetsamplel7b81f5rgmgk","tableId":"bigquerytablesamplel7b81f5rgmgk"},"friendlyName":"bigquerytable-sample-updated","labels":{"cnrm-test":"true","managed-by-cnrm":"true"},"schema":{"fields":[{"name":"string_field_0","type":"STRING","mode":"NULLABLE"},{"name":"string_field_1","type":"STRING","mode":"NULLABLE"},{"name":"string_field_2","type":"STRING","mode":"NULLABLE"},{"name":"string_field_3","type":"STRING","mode":"NULLABLE"},{"name":"string_field_4","type":"STRING","mode":"NULLABLE"},{"name":"string_field_5","type":"STRING","mode":"NULLABLE"},{"name":"int64_field_6","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_7","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_8","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_9","type":"INTEGER","mode":"NULLABLE"},{"name":"string_field_10","type":"STRING","mode":"NULLABLE"},{"name":"int64_field_11","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_12","type":"INTEGER","mode":"NULLABLE"},{"name":"string_field_13","type":"STRING","mode":"NULLABLE"}]},"numBytes":"0","numLongTermBytes":"0","numRows":"0","creationTime":"1729233686287","lastModifiedTime":"1729233689324","type":"EXTERNAL","externalDataConfiguration":{"sourceUris":["gs://gcp-public-data-landsat/LC08/01/044/034/LC08_L1GT_044034_20130330_20170310_01_T2/LC08_L1GT_044034_20130330_20170310_01_T2_ANG.txt"],"sourceFormat":"CSV","autodetect":true,"compression":"NONE"},"location":"us-central1","requirePartitionFilter":false,"numTotalLogicalBytes":"0","numActiveLogicalBytes":"0","numLongTermLogicalBytes":"0"}' headers: Content-Type: - application/json; charset=UTF-8 status: 200 OK code: 200 - duration: 329.29309ms + duration: 232.683408ms - id: 10 request: proto: HTTP/1.1 @@ -475,7 +477,7 @@ interactions: form: {} headers: X-Goog-Api-Client: - - gl-go/1.22.5 gdcl/0.187.0 + - gl-go/1.23.0--20240626-RC01 cl/646990413 +5a18e79687 X:fieldtrack,boringcrypto gdcl/0.196.0 url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydatasetsamplel7b81f5rgmgk/tables/bigquerytablesamplel7b81f5rgmgk?alt=json&prettyPrint=false method: GET response: @@ -486,13 +488,13 @@ interactions: trailer: {} content_length: -1 uncompressed: true - body: 
'{"kind":"bigquery#table","etag":"BMni8WOOCXFgFa5BC/jfkg==","id":"example-project:bigquerydatasetsamplel7b81f5rgmgk.bigquerytablesamplel7b81f5rgmgk","selfLink":"https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydatasetsamplel7b81f5rgmgk/tables/bigquerytablesamplel7b81f5rgmgk","tableReference":{"projectId":"example-project","datasetId":"bigquerydatasetsamplel7b81f5rgmgk","tableId":"bigquerytablesamplel7b81f5rgmgk"},"friendlyName":"bigquerytable-sample-updated","labels":{"cnrm-test":"true","managed-by-cnrm":"true"},"schema":{"fields":[{"name":"string_field_0","type":"STRING","mode":"NULLABLE"},{"name":"string_field_1","type":"STRING","mode":"NULLABLE"},{"name":"string_field_2","type":"STRING","mode":"NULLABLE"},{"name":"string_field_3","type":"STRING","mode":"NULLABLE"},{"name":"string_field_4","type":"STRING","mode":"NULLABLE"},{"name":"string_field_5","type":"STRING","mode":"NULLABLE"},{"name":"int64_field_6","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_7","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_8","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_9","type":"INTEGER","mode":"NULLABLE"},{"name":"string_field_10","type":"STRING","mode":"NULLABLE"},{"name":"int64_field_11","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_12","type":"INTEGER","mode":"NULLABLE"},{"name":"string_field_13","type":"STRING","mode":"NULLABLE"}]},"numBytes":"0","numLongTermBytes":"0","numRows":"0","creationTime":"1720515936786","lastModifiedTime":"1720515940935","type":"EXTERNAL","externalDataConfiguration":{"sourceUris":["gs://gcp-public-data-landsat/LC08/01/044/034/LC08_L1GT_044034_20130330_20170310_01_T2/LC08_L1GT_044034_20130330_20170310_01_T2_ANG.txt"],"sourceFormat":"CSV","autodetect":true,"compression":"NONE"},"location":"US","requirePartitionFilter":false,"numTotalLogicalBytes":"0","numActiveLogicalBytes":"0","numLongTermLogicalBytes":"0"}' + body: 
'{"kind":"bigquery#table","etag":"rCwyhaRq2nEw0eAFProieQ==","id":"example-project:bigquerydatasetsamplel7b81f5rgmgk.bigquerytablesamplel7b81f5rgmgk","selfLink":"https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydatasetsamplel7b81f5rgmgk/tables/bigquerytablesamplel7b81f5rgmgk","tableReference":{"projectId":"example-project","datasetId":"bigquerydatasetsamplel7b81f5rgmgk","tableId":"bigquerytablesamplel7b81f5rgmgk"},"friendlyName":"bigquerytable-sample-updated","labels":{"cnrm-test":"true","managed-by-cnrm":"true"},"schema":{"fields":[{"name":"string_field_0","type":"STRING","mode":"NULLABLE"},{"name":"string_field_1","type":"STRING","mode":"NULLABLE"},{"name":"string_field_2","type":"STRING","mode":"NULLABLE"},{"name":"string_field_3","type":"STRING","mode":"NULLABLE"},{"name":"string_field_4","type":"STRING","mode":"NULLABLE"},{"name":"string_field_5","type":"STRING","mode":"NULLABLE"},{"name":"int64_field_6","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_7","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_8","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_9","type":"INTEGER","mode":"NULLABLE"},{"name":"string_field_10","type":"STRING","mode":"NULLABLE"},{"name":"int64_field_11","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_12","type":"INTEGER","mode":"NULLABLE"},{"name":"string_field_13","type":"STRING","mode":"NULLABLE"}]},"numBytes":"0","numLongTermBytes":"0","numRows":"0","creationTime":"1729233686287","lastModifiedTime":"1729233689324","type":"EXTERNAL","externalDataConfiguration":{"sourceUris":["gs://gcp-public-data-landsat/LC08/01/044/034/LC08_L1GT_044034_20130330_20170310_01_T2/LC08_L1GT_044034_20130330_20170310_01_T2_ANG.txt"],"sourceFormat":"CSV","autodetect":true,"compression":"NONE"},"location":"us-central1","requirePartitionFilter":false,"numTotalLogicalBytes":"0","numActiveLogicalBytes":"0","numLongTermLogicalBytes":"0"}' headers: Content-Type: - application/json; charset=UTF-8 status: 200 OK code: 200 - duration: 282.727165ms + duration: 276.581644ms - id: 11 request: proto: HTTP/1.1 @@ -522,7 +524,7 @@ interactions: body: | { "kind": "bigquery#dataset", - "etag": "pYo/zUITYaROvtieJn/Efg==", + "etag": "uJ2Ltq8bm7J0FUJdKMsN4Q==", "id": "example-project:bigquerydatasetsamplel7b81f5rgmgk", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydatasetsamplel7b81f5rgmgk", "datasetReference": { @@ -544,24 +546,25 @@ interactions: }, { "role": "OWNER", - "userByEmail": "andylu@pisces.joonix.net" + "userByEmail": "xiaoweim@google.com" }, { "role": "READER", "specialGroup": "projectReaders" } ], - "creationTime": "1720515934342", - "lastModifiedTime": "1720515934342", - "location": "US", - "type": "DEFAULT" + "creationTime": "1729233685041", + "lastModifiedTime": "1729233685041", + "location": "us-central1", + "type": "DEFAULT", + "maxTimeTravelHours": "168" } headers: Content-Type: - application/json; charset=UTF-8 status: 200 OK code: 200 - duration: 327.354671ms + duration: 145.635566ms - id: 12 request: proto: HTTP/1.1 @@ -577,7 +580,7 @@ interactions: form: {} headers: X-Goog-Api-Client: - - gl-go/1.22.5 gdcl/0.187.0 + - gl-go/1.23.0--20240626-RC01 cl/646990413 +5a18e79687 X:fieldtrack,boringcrypto gdcl/0.196.0 url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydatasetsamplel7b81f5rgmgk/tables/bigquerytablesamplel7b81f5rgmgk?alt=json&prettyPrint=false method: GET response: @@ -588,13 +591,13 @@ interactions: trailer: {} 
content_length: -1 uncompressed: true - body: '{"kind":"bigquery#table","etag":"BMni8WOOCXFgFa5BC/jfkg==","id":"example-project:bigquerydatasetsamplel7b81f5rgmgk.bigquerytablesamplel7b81f5rgmgk","selfLink":"https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydatasetsamplel7b81f5rgmgk/tables/bigquerytablesamplel7b81f5rgmgk","tableReference":{"projectId":"example-project","datasetId":"bigquerydatasetsamplel7b81f5rgmgk","tableId":"bigquerytablesamplel7b81f5rgmgk"},"friendlyName":"bigquerytable-sample-updated","labels":{"cnrm-test":"true","managed-by-cnrm":"true"},"schema":{"fields":[{"name":"string_field_0","type":"STRING","mode":"NULLABLE"},{"name":"string_field_1","type":"STRING","mode":"NULLABLE"},{"name":"string_field_2","type":"STRING","mode":"NULLABLE"},{"name":"string_field_3","type":"STRING","mode":"NULLABLE"},{"name":"string_field_4","type":"STRING","mode":"NULLABLE"},{"name":"string_field_5","type":"STRING","mode":"NULLABLE"},{"name":"int64_field_6","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_7","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_8","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_9","type":"INTEGER","mode":"NULLABLE"},{"name":"string_field_10","type":"STRING","mode":"NULLABLE"},{"name":"int64_field_11","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_12","type":"INTEGER","mode":"NULLABLE"},{"name":"string_field_13","type":"STRING","mode":"NULLABLE"}]},"numBytes":"0","numLongTermBytes":"0","numRows":"0","creationTime":"1720515936786","lastModifiedTime":"1720515940935","type":"EXTERNAL","externalDataConfiguration":{"sourceUris":["gs://gcp-public-data-landsat/LC08/01/044/034/LC08_L1GT_044034_20130330_20170310_01_T2/LC08_L1GT_044034_20130330_20170310_01_T2_ANG.txt"],"sourceFormat":"CSV","autodetect":true,"compression":"NONE"},"location":"US","requirePartitionFilter":false,"numTotalLogicalBytes":"0","numActiveLogicalBytes":"0","numLongTermLogicalBytes":"0"}' + body: 
'{"kind":"bigquery#table","etag":"rCwyhaRq2nEw0eAFProieQ==","id":"example-project:bigquerydatasetsamplel7b81f5rgmgk.bigquerytablesamplel7b81f5rgmgk","selfLink":"https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydatasetsamplel7b81f5rgmgk/tables/bigquerytablesamplel7b81f5rgmgk","tableReference":{"projectId":"example-project","datasetId":"bigquerydatasetsamplel7b81f5rgmgk","tableId":"bigquerytablesamplel7b81f5rgmgk"},"friendlyName":"bigquerytable-sample-updated","labels":{"cnrm-test":"true","managed-by-cnrm":"true"},"schema":{"fields":[{"name":"string_field_0","type":"STRING","mode":"NULLABLE"},{"name":"string_field_1","type":"STRING","mode":"NULLABLE"},{"name":"string_field_2","type":"STRING","mode":"NULLABLE"},{"name":"string_field_3","type":"STRING","mode":"NULLABLE"},{"name":"string_field_4","type":"STRING","mode":"NULLABLE"},{"name":"string_field_5","type":"STRING","mode":"NULLABLE"},{"name":"int64_field_6","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_7","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_8","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_9","type":"INTEGER","mode":"NULLABLE"},{"name":"string_field_10","type":"STRING","mode":"NULLABLE"},{"name":"int64_field_11","type":"INTEGER","mode":"NULLABLE"},{"name":"int64_field_12","type":"INTEGER","mode":"NULLABLE"},{"name":"string_field_13","type":"STRING","mode":"NULLABLE"}]},"numBytes":"0","numLongTermBytes":"0","numRows":"0","creationTime":"1729233686287","lastModifiedTime":"1729233689324","type":"EXTERNAL","externalDataConfiguration":{"sourceUris":["gs://gcp-public-data-landsat/LC08/01/044/034/LC08_L1GT_044034_20130330_20170310_01_T2/LC08_L1GT_044034_20130330_20170310_01_T2_ANG.txt"],"sourceFormat":"CSV","autodetect":true,"compression":"NONE"},"location":"us-central1","requirePartitionFilter":false,"numTotalLogicalBytes":"0","numActiveLogicalBytes":"0","numLongTermLogicalBytes":"0"}' headers: Content-Type: - application/json; charset=UTF-8 status: 200 OK code: 200 - duration: 371.806487ms + duration: 182.848192ms - id: 13 request: proto: HTTP/1.1 @@ -627,7 +630,7 @@ interactions: - application/json; charset=UTF-8 status: 400 Bad Request code: 400 - duration: 287.000684ms + duration: 138.945657ms - id: 14 request: proto: HTTP/1.1 @@ -643,7 +646,7 @@ interactions: form: {} headers: X-Goog-Api-Client: - - gl-go/1.22.5 gdcl/0.187.0 + - gl-go/1.23.0--20240626-RC01 cl/646990413 +5a18e79687 X:fieldtrack,boringcrypto gdcl/0.196.0 url: https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydatasetsamplel7b81f5rgmgk/tables/bigquerytablesamplel7b81f5rgmgk?alt=json&prettyPrint=false method: DELETE response: @@ -662,7 +665,7 @@ interactions: - application/json; charset=UTF-8 status: 204 No Content code: 204 - duration: 316.441881ms + duration: 205.179181ms - id: 15 request: proto: HTTP/1.1 @@ -692,7 +695,7 @@ interactions: body: | { "kind": "bigquery#dataset", - "etag": "pYo/zUITYaROvtieJn/Efg==", + "etag": "uJ2Ltq8bm7J0FUJdKMsN4Q==", "id": "example-project:bigquerydatasetsamplel7b81f5rgmgk", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/example-project/datasets/bigquerydatasetsamplel7b81f5rgmgk", "datasetReference": { @@ -714,24 +717,25 @@ interactions: }, { "role": "OWNER", - "userByEmail": "andylu@pisces.joonix.net" + "userByEmail": "xiaoweim@google.com" }, { "role": "READER", "specialGroup": "projectReaders" } ], - "creationTime": "1720515934342", - "lastModifiedTime": "1720515934342", - "location": "US", - "type": 
"DEFAULT" + "creationTime": "1729233685041", + "lastModifiedTime": "1729233685041", + "location": "us-central1", + "type": "DEFAULT", + "maxTimeTravelHours": "168" } headers: Content-Type: - application/json; charset=UTF-8 status: 200 OK code: 200 - duration: 436.728931ms + duration: 163.479275ms - id: 16 request: proto: HTTP/1.1 @@ -766,4 +770,4 @@ interactions: - application/json; charset=UTF-8 status: 204 No Content code: 204 - duration: 391.253016ms + duration: 191.252482ms diff --git a/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerytable/dependencies.yaml b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerytable/dependencies.yaml index 0ee6474d32..32ef5d84e8 100644 --- a/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerytable/dependencies.yaml +++ b/pkg/test/resourcefixture/testdata/basic/bigquery/v1beta1/bigquerytable/dependencies.yaml @@ -16,3 +16,5 @@ apiVersion: bigquery.cnrm.cloud.google.com/v1beta1 kind: BigQueryDataset metadata: name: bigquerydatasetsample${uniqueId} +spec: + location: us-central1 diff --git a/pkg/test/resourcefixture/testdata/basic/dataflow/v1beta1/dataflowflextemplatejob/streamingdataflowflextemplatejob/_http.log b/pkg/test/resourcefixture/testdata/basic/dataflow/v1beta1/dataflowflextemplatejob/streamingdataflowflextemplatejob/_http.log index 6f2bafb6f2..1c46ce6b25 100644 --- a/pkg/test/resourcefixture/testdata/basic/dataflow/v1beta1/dataflowflextemplatejob/streamingdataflowflextemplatejob/_http.log +++ b/pkg/test/resourcefixture/testdata/basic/dataflow/v1beta1/dataflowflextemplatejob/streamingdataflowflextemplatejob/_http.log @@ -42,7 +42,7 @@ User-Agent: Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 t "cnrm-test": "true", "managed-by-cnrm": "true" }, - "location": "US" + "location": "us-central1" } 200 OK @@ -88,7 +88,7 @@ X-Xss-Protection: 0 "managed-by-cnrm": "true" }, "lastModifiedTime": "123456789", - "location": "US", + "location": "us-central1", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydataset${uniqueId}", "type": "DEFAULT" } @@ -142,7 +142,7 @@ X-Xss-Protection: 0 "managed-by-cnrm": "true" }, "lastModifiedTime": "123456789", - "location": "US", + "location": "us-central1", "maxTimeTravelHours": "168", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydataset${uniqueId}", "type": "DEFAULT" @@ -1413,7 +1413,7 @@ X-Xss-Protection: 0 "managed-by-cnrm": "true" }, "lastModifiedTime": "123456789", - "location": "US", + "location": "us-central1", "maxTimeTravelHours": "168", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydataset${uniqueId}", "type": "DEFAULT" diff --git a/pkg/test/resourcefixture/testdata/basic/dataflow/v1beta1/dataflowflextemplatejob/streamingdataflowflextemplatejob/dependencies.yaml b/pkg/test/resourcefixture/testdata/basic/dataflow/v1beta1/dataflowflextemplatejob/streamingdataflowflextemplatejob/dependencies.yaml index c3cfc6cd83..9ac0d27274 100644 --- a/pkg/test/resourcefixture/testdata/basic/dataflow/v1beta1/dataflowflextemplatejob/streamingdataflowflextemplatejob/dependencies.yaml +++ b/pkg/test/resourcefixture/testdata/basic/dataflow/v1beta1/dataflowflextemplatejob/streamingdataflowflextemplatejob/dependencies.yaml @@ -18,6 +18,8 @@ metadata: name: bigquerydataset${uniqueId} annotations: cnrm.cloud.google.com/reconcile-interval-in-seconds: "0" # Make more deterministic +spec: + location: us-central1 --- apiVersion: 
bigquery.cnrm.cloud.google.com/v1beta1 kind: BigQueryTable diff --git a/pkg/test/resourcefixture/testdata/basic/pubsub/v1beta1/pubsubsubscription/bigquerypubsubsubscription/_http.log b/pkg/test/resourcefixture/testdata/basic/pubsub/v1beta1/pubsubsubscription/bigquerypubsubsubscription/_http.log index 4cb23f7d40..065b1fac9a 100644 --- a/pkg/test/resourcefixture/testdata/basic/pubsub/v1beta1/pubsubsubscription/bigquerypubsubsubscription/_http.log +++ b/pkg/test/resourcefixture/testdata/basic/pubsub/v1beta1/pubsubsubscription/bigquerypubsubsubscription/_http.log @@ -729,7 +729,7 @@ User-Agent: Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 t "cnrm-test": "true", "managed-by-cnrm": "true" }, - "location": "US" + "location": "us-central1" } 200 OK @@ -775,7 +775,7 @@ X-Xss-Protection: 0 "managed-by-cnrm": "true" }, "lastModifiedTime": "123456789", - "location": "US", + "location": "us-central1", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydataset${uniqueId}", "type": "DEFAULT" } @@ -829,7 +829,7 @@ X-Xss-Protection: 0 "managed-by-cnrm": "true" }, "lastModifiedTime": "123456789", - "location": "US", + "location": "us-central1", "maxTimeTravelHours": "168", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydataset${uniqueId}", "type": "DEFAULT" @@ -1607,7 +1607,7 @@ X-Xss-Protection: 0 "managed-by-cnrm": "true" }, "lastModifiedTime": "123456789", - "location": "US", + "location": "us-central1", "maxTimeTravelHours": "168", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydataset${uniqueId}", "type": "DEFAULT" diff --git a/pkg/test/resourcefixture/testdata/basic/pubsub/v1beta1/pubsubsubscription/bigquerypubsubsubscription/dependencies.yaml b/pkg/test/resourcefixture/testdata/basic/pubsub/v1beta1/pubsubsubscription/bigquerypubsubsubscription/dependencies.yaml index 19fe071483..ac24492d6e 100644 --- a/pkg/test/resourcefixture/testdata/basic/pubsub/v1beta1/pubsubsubscription/bigquerypubsubsubscription/dependencies.yaml +++ b/pkg/test/resourcefixture/testdata/basic/pubsub/v1beta1/pubsubsubscription/bigquerypubsubsubscription/dependencies.yaml @@ -61,6 +61,7 @@ metadata: name: bigquerydataset-${uniqueId} spec: resourceID: bigquerydataset${uniqueId} + location: us-central1 --- apiVersion: bigquery.cnrm.cloud.google.com/v1beta1 kind: BigQueryTable diff --git a/pkg/test/resourcefixture/testdata/containerannotations/projectid/_generated_export_projectid.golden b/pkg/test/resourcefixture/testdata/containerannotations/projectid/_generated_export_projectid.golden index 95391263e2..6c64b9259f 100644 --- a/pkg/test/resourcefixture/testdata/containerannotations/projectid/_generated_export_projectid.golden +++ b/pkg/test/resourcefixture/testdata/containerannotations/projectid/_generated_export_projectid.golden @@ -18,7 +18,7 @@ spec: - role: WRITER specialGroup: projectWriters friendlyName: bigquerydataset-sample-updated - location: US + location: us-central1 maxTimeTravelHours: "168" projectRef: external: ${projectId} diff --git a/pkg/test/resourcefixture/testdata/containerannotations/projectid/_generated_object_projectid.golden.yaml b/pkg/test/resourcefixture/testdata/containerannotations/projectid/_generated_object_projectid.golden.yaml index 002fd319cd..55b7474270 100644 --- a/pkg/test/resourcefixture/testdata/containerannotations/projectid/_generated_object_projectid.golden.yaml +++ 
b/pkg/test/resourcefixture/testdata/containerannotations/projectid/_generated_object_projectid.golden.yaml @@ -15,6 +15,7 @@ metadata: namespace: ${uniqueId} spec: friendlyName: bigquerydataset-sample-updated + location: us-central1 projectRef: external: ${projectId} resourceID: bigquerydatasetsample${uniqueId} diff --git a/pkg/test/resourcefixture/testdata/containerannotations/projectid/_http.log b/pkg/test/resourcefixture/testdata/containerannotations/projectid/_http.log index 4f53cdab6e..7db6b66ec7 100644 --- a/pkg/test/resourcefixture/testdata/containerannotations/projectid/_http.log +++ b/pkg/test/resourcefixture/testdata/containerannotations/projectid/_http.log @@ -43,7 +43,7 @@ User-Agent: Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 t "cnrm-test": "true", "managed-by-cnrm": "true" }, - "location": "US" + "location": "us-central1" } 200 OK @@ -90,7 +90,7 @@ X-Xss-Protection: 0 "managed-by-cnrm": "true" }, "lastModifiedTime": "123456789", - "location": "US", + "location": "us-central1", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydatasetsample${uniqueId}", "type": "DEFAULT" } @@ -145,7 +145,7 @@ X-Xss-Protection: 0 "managed-by-cnrm": "true" }, "lastModifiedTime": "123456789", - "location": "US", + "location": "us-central1", "maxTimeTravelHours": "168", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydatasetsample${uniqueId}", "type": "DEFAULT" @@ -184,7 +184,7 @@ User-Agent: Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 t "cnrm-test": "true", "managed-by-cnrm": "true" }, - "location": "US", + "location": "us-central1", "maxTimeTravelHours": "168" } @@ -232,7 +232,7 @@ X-Xss-Protection: 0 "managed-by-cnrm": "true" }, "lastModifiedTime": "123456789", - "location": "US", + "location": "us-central1", "maxTimeTravelHours": "168", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydatasetsample${uniqueId}", "type": "DEFAULT" @@ -288,7 +288,7 @@ X-Xss-Protection: 0 "managed-by-cnrm": "true" }, "lastModifiedTime": "123456789", - "location": "US", + "location": "us-central1", "maxTimeTravelHours": "168", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydatasetsample${uniqueId}", "type": "DEFAULT" diff --git a/pkg/test/resourcefixture/testdata/containerannotations/projectid/create.yaml b/pkg/test/resourcefixture/testdata/containerannotations/projectid/create.yaml index 9c84ae2468..ae60ea9963 100644 --- a/pkg/test/resourcefixture/testdata/containerannotations/projectid/create.yaml +++ b/pkg/test/resourcefixture/testdata/containerannotations/projectid/create.yaml @@ -20,3 +20,4 @@ metadata: name: bigquerydatasetsample${uniqueId} spec: friendlyName: bigquerydataset-sample + location: us-central1 diff --git a/pkg/test/resourcefixture/testdata/containerannotations/projectid/update.yaml b/pkg/test/resourcefixture/testdata/containerannotations/projectid/update.yaml index 0692aa1b85..6664f6fcb2 100644 --- a/pkg/test/resourcefixture/testdata/containerannotations/projectid/update.yaml +++ b/pkg/test/resourcefixture/testdata/containerannotations/projectid/update.yaml @@ -20,3 +20,4 @@ metadata: name: bigquerydatasetsample${uniqueId} spec: friendlyName: bigquerydataset-sample-updated + location: us-central1 diff --git a/pkg/test/resourcefixture/testdata/reconcileintervalannotations/bigquerydataset/_generated_export_bigquerydataset.golden 
b/pkg/test/resourcefixture/testdata/reconcileintervalannotations/bigquerydataset/_generated_export_bigquerydataset.golden index 95391263e2..6c64b9259f 100644 --- a/pkg/test/resourcefixture/testdata/reconcileintervalannotations/bigquerydataset/_generated_export_bigquerydataset.golden +++ b/pkg/test/resourcefixture/testdata/reconcileintervalannotations/bigquerydataset/_generated_export_bigquerydataset.golden @@ -18,7 +18,7 @@ spec: - role: WRITER specialGroup: projectWriters friendlyName: bigquerydataset-sample-updated - location: US + location: us-central1 maxTimeTravelHours: "168" projectRef: external: ${projectId} diff --git a/pkg/test/resourcefixture/testdata/reconcileintervalannotations/bigquerydataset/_generated_object_bigquerydataset.golden.yaml b/pkg/test/resourcefixture/testdata/reconcileintervalannotations/bigquerydataset/_generated_object_bigquerydataset.golden.yaml index e51bf29cb4..cc5af0df9e 100644 --- a/pkg/test/resourcefixture/testdata/reconcileintervalannotations/bigquerydataset/_generated_object_bigquerydataset.golden.yaml +++ b/pkg/test/resourcefixture/testdata/reconcileintervalannotations/bigquerydataset/_generated_object_bigquerydataset.golden.yaml @@ -15,6 +15,7 @@ metadata: namespace: ${uniqueId} spec: friendlyName: bigquerydataset-sample-updated + location: us-central1 projectRef: external: ${projectId} resourceID: bigquerydatasetsample${uniqueId} diff --git a/pkg/test/resourcefixture/testdata/reconcileintervalannotations/bigquerydataset/_http.log b/pkg/test/resourcefixture/testdata/reconcileintervalannotations/bigquerydataset/_http.log index 4f53cdab6e..7db6b66ec7 100644 --- a/pkg/test/resourcefixture/testdata/reconcileintervalannotations/bigquerydataset/_http.log +++ b/pkg/test/resourcefixture/testdata/reconcileintervalannotations/bigquerydataset/_http.log @@ -43,7 +43,7 @@ User-Agent: Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 t "cnrm-test": "true", "managed-by-cnrm": "true" }, - "location": "US" + "location": "us-central1" } 200 OK @@ -90,7 +90,7 @@ X-Xss-Protection: 0 "managed-by-cnrm": "true" }, "lastModifiedTime": "123456789", - "location": "US", + "location": "us-central1", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydatasetsample${uniqueId}", "type": "DEFAULT" } @@ -145,7 +145,7 @@ X-Xss-Protection: 0 "managed-by-cnrm": "true" }, "lastModifiedTime": "123456789", - "location": "US", + "location": "us-central1", "maxTimeTravelHours": "168", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydatasetsample${uniqueId}", "type": "DEFAULT" @@ -184,7 +184,7 @@ User-Agent: Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 t "cnrm-test": "true", "managed-by-cnrm": "true" }, - "location": "US", + "location": "us-central1", "maxTimeTravelHours": "168" } @@ -232,7 +232,7 @@ X-Xss-Protection: 0 "managed-by-cnrm": "true" }, "lastModifiedTime": "123456789", - "location": "US", + "location": "us-central1", "maxTimeTravelHours": "168", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydatasetsample${uniqueId}", "type": "DEFAULT" @@ -288,7 +288,7 @@ X-Xss-Protection: 0 "managed-by-cnrm": "true" }, "lastModifiedTime": "123456789", - "location": "US", + "location": "us-central1", "maxTimeTravelHours": "168", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydatasetsample${uniqueId}", "type": "DEFAULT" diff --git 
a/pkg/test/resourcefixture/testdata/reconcileintervalannotations/bigquerydataset/create.yaml b/pkg/test/resourcefixture/testdata/reconcileintervalannotations/bigquerydataset/create.yaml index 4875c42478..6f7cabcd6a 100644 --- a/pkg/test/resourcefixture/testdata/reconcileintervalannotations/bigquerydataset/create.yaml +++ b/pkg/test/resourcefixture/testdata/reconcileintervalannotations/bigquerydataset/create.yaml @@ -20,3 +20,4 @@ metadata: cnrm.cloud.google.com/reconcile-interval-in-seconds : "5" spec: friendlyName: bigquerydataset-sample + location: us-central1 diff --git a/pkg/test/resourcefixture/testdata/reconcileintervalannotations/bigquerydataset/update.yaml b/pkg/test/resourcefixture/testdata/reconcileintervalannotations/bigquerydataset/update.yaml index 621d320506..f4aa168169 100644 --- a/pkg/test/resourcefixture/testdata/reconcileintervalannotations/bigquerydataset/update.yaml +++ b/pkg/test/resourcefixture/testdata/reconcileintervalannotations/bigquerydataset/update.yaml @@ -20,3 +20,4 @@ metadata: cnrm.cloud.google.com/reconcile-interval-in-seconds : "10" spec: friendlyName: bigquerydataset-sample-updated + location: us-central1 diff --git a/pkg/test/resourcefixture/testdata/resourceid/referencewithuserspecifiedresourceid/_http.log b/pkg/test/resourcefixture/testdata/resourceid/referencewithuserspecifiedresourceid/_http.log index 9c2d5b2eee..794acc3ecd 100644 --- a/pkg/test/resourcefixture/testdata/resourceid/referencewithuserspecifiedresourceid/_http.log +++ b/pkg/test/resourcefixture/testdata/resourceid/referencewithuserspecifiedresourceid/_http.log @@ -42,7 +42,7 @@ User-Agent: Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 t "cnrm-test": "true", "managed-by-cnrm": "true" }, - "location": "US" + "location": "us-central1" } 200 OK @@ -88,7 +88,7 @@ X-Xss-Protection: 0 "managed-by-cnrm": "true" }, "lastModifiedTime": "123456789", - "location": "US", + "location": "us-central1", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydataset_resourceid_${uniqueId}", "type": "DEFAULT" } @@ -142,7 +142,7 @@ X-Xss-Protection: 0 "managed-by-cnrm": "true" }, "lastModifiedTime": "123456789", - "location": "US", + "location": "us-central1", "maxTimeTravelHours": "168", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydataset_resourceid_${uniqueId}", "type": "DEFAULT" @@ -445,7 +445,7 @@ X-Xss-Protection: 0 "managed-by-cnrm": "true" }, "lastModifiedTime": "123456789", - "location": "US", + "location": "us-central1", "maxTimeTravelHours": "168", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydataset_resourceid_${uniqueId}", "type": "DEFAULT" diff --git a/pkg/test/resourcefixture/testdata/resourceid/referencewithuserspecifiedresourceid/dependencies.yaml b/pkg/test/resourcefixture/testdata/resourceid/referencewithuserspecifiedresourceid/dependencies.yaml index a6863fd831..7130aee1be 100644 --- a/pkg/test/resourcefixture/testdata/resourceid/referencewithuserspecifiedresourceid/dependencies.yaml +++ b/pkg/test/resourcefixture/testdata/resourceid/referencewithuserspecifiedresourceid/dependencies.yaml @@ -18,3 +18,4 @@ metadata: name: bigquerydataset-resourceid-${uniqueId} spec: resourceID: bigquerydataset_resourceid_${uniqueId} + location: us-central1 diff --git a/pkg/test/resourcefixture/testdata/resourceid/userspecifiedresourceid/_generated_export_userspecifiedresourceid.golden 
b/pkg/test/resourcefixture/testdata/resourceid/userspecifiedresourceid/_generated_export_userspecifiedresourceid.golden index c6deeeb8c5..5f60085e3c 100644 --- a/pkg/test/resourcefixture/testdata/resourceid/userspecifiedresourceid/_generated_export_userspecifiedresourceid.golden +++ b/pkg/test/resourcefixture/testdata/resourceid/userspecifiedresourceid/_generated_export_userspecifiedresourceid.golden @@ -18,7 +18,7 @@ spec: - role: WRITER specialGroup: projectWriters friendlyName: bigquerydataset-sample-updated - location: US + location: us-central1 maxTimeTravelHours: "168" projectRef: external: ${projectId} diff --git a/pkg/test/resourcefixture/testdata/resourceid/userspecifiedresourceid/_generated_object_userspecifiedresourceid.golden.yaml b/pkg/test/resourcefixture/testdata/resourceid/userspecifiedresourceid/_generated_object_userspecifiedresourceid.golden.yaml index 78a3f1328d..10cf7fe794 100644 --- a/pkg/test/resourcefixture/testdata/resourceid/userspecifiedresourceid/_generated_object_userspecifiedresourceid.golden.yaml +++ b/pkg/test/resourcefixture/testdata/resourceid/userspecifiedresourceid/_generated_object_userspecifiedresourceid.golden.yaml @@ -14,6 +14,7 @@ metadata: namespace: ${uniqueId} spec: friendlyName: bigquerydataset-sample-updated + location: us-central1 projectRef: external: ${projectId} resourceID: bigquerydataset_${uniqueId} diff --git a/pkg/test/resourcefixture/testdata/resourceid/userspecifiedresourceid/_http.log b/pkg/test/resourcefixture/testdata/resourceid/userspecifiedresourceid/_http.log index fd92a16a92..323f1ba5db 100644 --- a/pkg/test/resourcefixture/testdata/resourceid/userspecifiedresourceid/_http.log +++ b/pkg/test/resourcefixture/testdata/resourceid/userspecifiedresourceid/_http.log @@ -43,7 +43,7 @@ User-Agent: Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 t "cnrm-test": "true", "managed-by-cnrm": "true" }, - "location": "US" + "location": "us-central1" } 200 OK @@ -90,7 +90,7 @@ X-Xss-Protection: 0 "managed-by-cnrm": "true" }, "lastModifiedTime": "123456789", - "location": "US", + "location": "us-central1", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydataset_${uniqueId}", "type": "DEFAULT" } @@ -145,7 +145,7 @@ X-Xss-Protection: 0 "managed-by-cnrm": "true" }, "lastModifiedTime": "123456789", - "location": "US", + "location": "us-central1", "maxTimeTravelHours": "168", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydataset_${uniqueId}", "type": "DEFAULT" @@ -184,7 +184,7 @@ User-Agent: Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 t "cnrm-test": "true", "managed-by-cnrm": "true" }, - "location": "US", + "location": "us-central1", "maxTimeTravelHours": "168" } @@ -232,7 +232,7 @@ X-Xss-Protection: 0 "managed-by-cnrm": "true" }, "lastModifiedTime": "123456789", - "location": "US", + "location": "us-central1", "maxTimeTravelHours": "168", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydataset_${uniqueId}", "type": "DEFAULT" @@ -288,7 +288,7 @@ X-Xss-Protection: 0 "managed-by-cnrm": "true" }, "lastModifiedTime": "123456789", - "location": "US", + "location": "us-central1", "maxTimeTravelHours": "168", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydataset_${uniqueId}", "type": "DEFAULT" diff --git a/pkg/test/resourcefixture/testdata/resourceid/userspecifiedresourceid/create.yaml 
b/pkg/test/resourcefixture/testdata/resourceid/userspecifiedresourceid/create.yaml index c6310cccaa..fb453ab139 100644 --- a/pkg/test/resourcefixture/testdata/resourceid/userspecifiedresourceid/create.yaml +++ b/pkg/test/resourcefixture/testdata/resourceid/userspecifiedresourceid/create.yaml @@ -19,5 +19,4 @@ metadata: spec: resourceID: bigquerydataset_${uniqueId} friendlyName: bigquerydataset-sample - projectRef: - external: ${projectId} + location: us-central1 diff --git a/pkg/test/resourcefixture/testdata/resourceid/userspecifiedresourceid/update.yaml b/pkg/test/resourcefixture/testdata/resourceid/userspecifiedresourceid/update.yaml index 3b599c01d3..9403a75c8d 100644 --- a/pkg/test/resourcefixture/testdata/resourceid/userspecifiedresourceid/update.yaml +++ b/pkg/test/resourcefixture/testdata/resourceid/userspecifiedresourceid/update.yaml @@ -19,5 +19,4 @@ metadata: spec: resourceID: bigquerydataset_${uniqueId} friendlyName: bigquerydataset-sample-updated - projectRef: - external: ${projectId} + location: us-central1 diff --git a/pkg/test/resourcefixture/testdata/stateabsentinspec/bigquerydataset/_generated_export_bigquerydataset#01.golden b/pkg/test/resourcefixture/testdata/stateabsentinspec/bigquerydataset/_generated_export_bigquerydataset#01.golden index 95391263e2..6c64b9259f 100644 --- a/pkg/test/resourcefixture/testdata/stateabsentinspec/bigquerydataset/_generated_export_bigquerydataset#01.golden +++ b/pkg/test/resourcefixture/testdata/stateabsentinspec/bigquerydataset/_generated_export_bigquerydataset#01.golden @@ -18,7 +18,7 @@ spec: - role: WRITER specialGroup: projectWriters friendlyName: bigquerydataset-sample-updated - location: US + location: us-central1 maxTimeTravelHours: "168" projectRef: external: ${projectId} diff --git a/pkg/test/resourcefixture/testdata/stateabsentinspec/bigquerydataset/_generated_object_bigquerydataset#01.golden.yaml b/pkg/test/resourcefixture/testdata/stateabsentinspec/bigquerydataset/_generated_object_bigquerydataset#01.golden.yaml index 3c4ff9cc03..e1b26c8300 100644 --- a/pkg/test/resourcefixture/testdata/stateabsentinspec/bigquerydataset/_generated_object_bigquerydataset#01.golden.yaml +++ b/pkg/test/resourcefixture/testdata/stateabsentinspec/bigquerydataset/_generated_object_bigquerydataset#01.golden.yaml @@ -1,17 +1,3 @@ -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- apiVersion: bigquery.cnrm.cloud.google.com/v1beta1 kind: BigQueryDataset metadata: @@ -28,6 +14,7 @@ metadata: namespace: ${uniqueId} spec: friendlyName: bigquerydataset-sample-updated + location: us-central1 projectRef: external: ${projectId} resourceID: bigquerydatasetsample${uniqueId} diff --git a/pkg/test/resourcefixture/testdata/stateabsentinspec/bigquerydataset/_http.log b/pkg/test/resourcefixture/testdata/stateabsentinspec/bigquerydataset/_http.log index 4f53cdab6e..7db6b66ec7 100644 --- a/pkg/test/resourcefixture/testdata/stateabsentinspec/bigquerydataset/_http.log +++ b/pkg/test/resourcefixture/testdata/stateabsentinspec/bigquerydataset/_http.log @@ -43,7 +43,7 @@ User-Agent: Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 t "cnrm-test": "true", "managed-by-cnrm": "true" }, - "location": "US" + "location": "us-central1" } 200 OK @@ -90,7 +90,7 @@ X-Xss-Protection: 0 "managed-by-cnrm": "true" }, "lastModifiedTime": "123456789", - "location": "US", + "location": "us-central1", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydatasetsample${uniqueId}", "type": "DEFAULT" } @@ -145,7 +145,7 @@ X-Xss-Protection: 0 "managed-by-cnrm": "true" }, "lastModifiedTime": "123456789", - "location": "US", + "location": "us-central1", "maxTimeTravelHours": "168", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydatasetsample${uniqueId}", "type": "DEFAULT" @@ -184,7 +184,7 @@ User-Agent: Terraform/ (+https://www.terraform.io) Terraform-Plugin-SDK/2.10.1 t "cnrm-test": "true", "managed-by-cnrm": "true" }, - "location": "US", + "location": "us-central1", "maxTimeTravelHours": "168" } @@ -232,7 +232,7 @@ X-Xss-Protection: 0 "managed-by-cnrm": "true" }, "lastModifiedTime": "123456789", - "location": "US", + "location": "us-central1", "maxTimeTravelHours": "168", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydatasetsample${uniqueId}", "type": "DEFAULT" @@ -288,7 +288,7 @@ X-Xss-Protection: 0 "managed-by-cnrm": "true" }, "lastModifiedTime": "123456789", - "location": "US", + "location": "us-central1", "maxTimeTravelHours": "168", "selfLink": "https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}/datasets/bigquerydatasetsample${uniqueId}", "type": "DEFAULT" diff --git a/pkg/test/resourcefixture/testdata/stateabsentinspec/bigquerydataset/create.yaml b/pkg/test/resourcefixture/testdata/stateabsentinspec/bigquerydataset/create.yaml index e1f01a5a3e..31fa868bdc 100644 --- a/pkg/test/resourcefixture/testdata/stateabsentinspec/bigquerydataset/create.yaml +++ b/pkg/test/resourcefixture/testdata/stateabsentinspec/bigquerydataset/create.yaml @@ -20,3 +20,4 @@ metadata: cnrm.cloud.google.com/state-into-spec: absent spec: friendlyName: bigquerydataset-sample + location: us-central1 diff --git a/pkg/test/resourcefixture/testdata/stateabsentinspec/bigquerydataset/update.yaml b/pkg/test/resourcefixture/testdata/stateabsentinspec/bigquerydataset/update.yaml index fba11be2c9..4e3b2fde98 100644 --- a/pkg/test/resourcefixture/testdata/stateabsentinspec/bigquerydataset/update.yaml +++ b/pkg/test/resourcefixture/testdata/stateabsentinspec/bigquerydataset/update.yaml @@ -20,3 +20,4 @@ metadata: cnrm.cloud.google.com/state-into-spec: absent spec: friendlyName: bigquerydataset-sample-updated + location: us-central1 diff --git a/scripts/generate-google3-docs/resource-reference/generated/resource-docs/bigquery/bigquerydataset.md 
b/scripts/generate-google3-docs/resource-reference/generated/resource-docs/bigquery/bigquerydataset.md index b7e523671d..793457b24d 100644 --- a/scripts/generate-google3-docs/resource-reference/generated/resource-docs/bigquery/bigquerydataset.md +++ b/scripts/generate-google3-docs/resource-reference/generated/resource-docs/bigquery/bigquerydataset.md @@ -189,7 +189,7 @@ storageBillingModel: string

 string
-{% verbatim %}Required. A unique ID for this dataset, without the project name. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 1,024 characters.{% endverbatim %}
+{% verbatim %}A unique Id for this dataset, without the project name. The Id must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 1,024 characters.{% endverbatim %}
@@ -199,7 +199,7 @@ storageBillingModel: string
 string
-{% verbatim %}Required. The ID of the project containing this dataset.{% endverbatim %}
+{% verbatim %}The ID of the project containing this dataset.{% endverbatim %}
@@ -289,7 +289,7 @@ storageBillingModel: string
 string
-{% verbatim %}Required. The ID of the dataset containing this routine.{% endverbatim %}
+{% verbatim %}The ID of the dataset containing this routine.{% endverbatim %}
@@ -299,7 +299,7 @@ storageBillingModel: string
 string
-{% verbatim %}Required. The ID of the project containing this routine.{% endverbatim %}
+{% verbatim %}The ID of the project containing this routine.{% endverbatim %}
@@ -309,7 +309,7 @@ storageBillingModel: string
 string
-{% verbatim %}Required. The ID of the routine. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 256 characters.{% endverbatim %}
+{% verbatim %}The Id of the routine. The Id must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 256 characters.{% endverbatim %}
@@ -356,7 +356,7 @@ storageBillingModel: string
 string
-{% verbatim %}Required. The ID of the dataset containing this table.{% endverbatim %}
+{% verbatim %}The ID of the dataset containing this table.{% endverbatim %}
@@ -366,7 +366,7 @@ storageBillingModel: string
 string
-{% verbatim %}Required. The ID of the project containing this table.{% endverbatim %}
+{% verbatim %}The ID of the project containing this table.{% endverbatim %}
@@ -376,7 +376,7 @@ storageBillingModel: string
 string
-{% verbatim %}Required. The ID of the table. The ID can contain Unicode characters in category L (letter), M (mark), N (number), Pc (connector, including underscore), Pd (dash), and Zs (space). For more information, see [General Category](https://wikipedia.org/wiki/Unicode_character_property#General_Category). The maximum length is 1,024 characters. Certain operations allow suffixing of the table ID with a partition decorator, such as `sample_table$20190123`.{% endverbatim %}
+{% verbatim %}The Id of the table. The Id can contain Unicode characters in category L (letter), M (mark), N (number), Pc (connector, including underscore), Pd (dash), and Zs (space). For more information, see [General Category](https://wikipedia.org/wiki/Unicode_character_property#General_Category). The maximum length is 1,024 characters. Certain operations allow suffixing of the table Id with a partition decorator, such as `sample_table$20190123`.{% endverbatim %}
@@ -511,7 +511,7 @@ storageBillingModel: string
 location
-Optional
+Required*
 string
@@ -535,7 +535,7 @@ storageBillingModel: string
 object
-{% verbatim %}The project that this resource belongs to. optional.{% endverbatim %}
+{% verbatim %} Optional. The project that this resource belongs to.{% endverbatim %}
@@ -616,6 +616,7 @@ conditions:
   type: string
 creationTime: integer
 etag: string
+externalRef: string
 lastModifiedTime: integer
 observedGeneration: integer
 selfLink: string
@@ -691,6 +692,13 @@ selfLink: string
 {% verbatim %}Output only. A hash of the resource.{% endverbatim %}
+externalRef
+string
+{% verbatim %}A unique specifier for the BigQueryAnalyticsHubDataExchangeListing resource in GCP.{% endverbatim %}
+ + lastModifiedTime diff --git a/scripts/generate-google3-docs/resource-reference/generated/resource-docs/bigquery/bigquerytable.md b/scripts/generate-google3-docs/resource-reference/generated/resource-docs/bigquery/bigquerytable.md index 2b63a482a6..5aa0060080 100644 --- a/scripts/generate-google3-docs/resource-reference/generated/resource-docs/bigquery/bigquerytable.md +++ b/scripts/generate-google3-docs/resource-reference/generated/resource-docs/bigquery/bigquerytable.md @@ -1264,6 +1264,7 @@ metadata: name: bigquerytabledep spec: friendlyName: bigquerytable-dep + location: us-central1 ``` diff --git a/scripts/generate-google3-docs/resource-reference/generated/resource-docs/dataflow/dataflowflextemplatejob.md b/scripts/generate-google3-docs/resource-reference/generated/resource-docs/dataflow/dataflowflextemplatejob.md index 5653f0783b..ade91e7965 100644 --- a/scripts/generate-google3-docs/resource-reference/generated/resource-docs/dataflow/dataflowflextemplatejob.md +++ b/scripts/generate-google3-docs/resource-reference/generated/resource-docs/dataflow/dataflowflextemplatejob.md @@ -649,6 +649,8 @@ apiVersion: bigquery.cnrm.cloud.google.com/v1beta1 kind: BigQueryDataset metadata: name: dataflowflextemplatejobdepstreaming +spec: + location: us-central1 --- apiVersion: bigquery.cnrm.cloud.google.com/v1beta1 kind: BigQueryTable diff --git a/scripts/generate-google3-docs/resource-reference/generated/resource-docs/pubsub/pubsubsubscription.md b/scripts/generate-google3-docs/resource-reference/generated/resource-docs/pubsub/pubsubsubscription.md index ca88e19e48..ce5486fa22 100644 --- a/scripts/generate-google3-docs/resource-reference/generated/resource-docs/pubsub/pubsubsubscription.md +++ b/scripts/generate-google3-docs/resource-reference/generated/resource-docs/pubsub/pubsubsubscription.md @@ -947,6 +947,7 @@ metadata: cnrm.cloud.google.com/project-id: ${PROJECT_ID?} spec: resourceID: pubsubsubscriptiondepbigquery + location: us-central1 --- # Replace ${PROJECT_ID?} below with your desired project ID. apiVersion: bigquery.cnrm.cloud.google.com/v1beta1