diff --git a/apis/bigquerydatatransfer/v1beta1/bigquerydatatransferconfig_types.go b/apis/bigquerydatatransfer/v1beta1/bigquerydatatransferconfig_types.go
new file mode 100644
index 0000000000..fe193c2e0f
--- /dev/null
+++ b/apis/bigquerydatatransfer/v1beta1/bigquerydatatransferconfig_types.go
@@ -0,0 +1,206 @@
+// Copyright 2024 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package v1beta1
+
+import (
+	refv1beta1 "github.com/GoogleCloudPlatform/k8s-config-connector/apis/refs/v1beta1"
+	"github.com/GoogleCloudPlatform/k8s-config-connector/pkg/apis/k8s/v1alpha1"
+	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
+)
+
+var BigQueryDataTransferConfigGVK = GroupVersion.WithKind("BigQueryDataTransferConfig")
+
+// +kcc:proto=google.cloud.bigquery.datatransfer.v1.EncryptionConfiguration
+type EncryptionConfiguration struct {
+	// The KMS key used for encrypting BigQuery data.
+	KmsKeyRef *refv1beta1.KMSCryptoKeyRef `json:"kmsKeyRef,omitempty"`
+}
+
+// BigQueryDataTransferConfigSpec defines the desired state of BigQueryDataTransferConfig
+// +kcc:proto=google.cloud.bigquery.datatransfer.v1.TransferConfig
+type BigQueryDataTransferConfigSpec struct {
+	// The number of days to look back to automatically refresh the data.
+	// For example, if `data_refresh_window_days = 10`, then every day
+	// BigQuery reingests data for [today-10, today-1], rather than ingesting data
+	// for just [today-1].
+	// Only valid if the data source supports the feature. Set the value to 0
+	// to use the default value.
+	DataRefreshWindowDays *int32 `json:"dataRefreshWindowDays,omitempty"`
+
+	// +kubebuilder:validation:XValidation:rule="self == oldSelf",message="DataSourceID field is immutable"
+	// Immutable.
+	// Data source ID. This cannot be changed once data transfer is created. The
+	// full list of available data source IDs can be returned through an API call:
+	// https://cloud.google.com/bigquery-transfer/docs/reference/datatransfer/rest/v1/projects.locations.dataSources/list
+	// +required
+	DataSourceID *string `json:"dataSourceID,omitempty"`
+
+	// The BigQuery target dataset id.
+	// +required
+	DatasetRef *refv1beta1.BigQueryDatasetRef `json:"datasetRef,omitempty"`
+
+	// Is this config disabled. When set to true, no runs will be scheduled for
+	// this transfer config.
+	Disabled *bool `json:"disabled,omitempty"`
+
+	// User specified display name for the data transfer.
+	DisplayName *string `json:"displayName,omitempty"`
+
+	// Email notifications will be sent according to these preferences
+	// to the email address of the user who owns this transfer config.
+	EmailPreferences *EmailPreferences `json:"emailPreferences,omitempty"`
+
+	// The encryption configuration part. Currently, it is only used for the
+	// optional KMS key name. The BigQuery service account of your project must be
+	// granted permissions to use the key. Read methods will return the key name
+	// applied in effect. Write methods will apply the key if it is present, or
+	// otherwise try to apply project default keys if it is absent.
+	EncryptionConfiguration *EncryptionConfiguration `json:"encryptionConfiguration,omitempty"`
+
+	// Pub/Sub topic where notifications will be sent after transfer runs
+	// associated with this transfer config finish.
+	PubSubTopicRef *refv1beta1.PubSubTopicRef `json:"pubSubTopicRef,omitempty"`
+
+	// Parameters specific to each data source. For more information see the
+	// bq tab in the 'Setting up a data transfer' section for each data source.
+	// For example the parameters for Cloud Storage transfers are listed here:
+	// https://cloud.google.com/bigquery-transfer/docs/cloud-storage-transfer#bq
+	// +required
+	Params map[string]string `json:"params,omitempty"`
+
+	Parent `json:",inline"`
+
+	// +kubebuilder:validation:XValidation:rule="self == oldSelf",message="ResourceID field is immutable"
+	// Immutable.
+	// The BigQueryDataTransferConfig name. If not given, the metadata.name will be used.
+	ResourceID *string `json:"resourceID,omitempty"`
+
+	// Data transfer schedule.
+	// If the data source does not support a custom schedule, this should be
+	// empty. If it is empty, the default value for the data source will be used.
+	// The specified times are in UTC.
+	// Examples of valid format:
+	// `1st,3rd monday of month 15:30`,
+	// `every wed,fri of jan,jun 13:15`, and
+	// `first sunday of quarter 00:00`.
+	// See more explanation about the format here:
+	// https://cloud.google.com/appengine/docs/flexible/python/scheduling-jobs-with-cron-yaml#the_schedule_format
+	//
+	// NOTE: The minimum interval time between recurring transfers depends on the
+	// data source; refer to the documentation for your data source.
+	Schedule *string `json:"schedule,omitempty"`
+
+	// Options customizing the data transfer schedule.
+	ScheduleOptions *ScheduleOptions `json:"scheduleOptions,omitempty"`
+
+	// Service account email. If this field is set, the transfer config will be created with this service account's credentials.
+	// It requires that the requesting user calling this API has permissions to act as this service account.
+	// Note that not all data sources support service account credentials when creating a transfer config.
+	// For the latest list of data sources, please refer to https://cloud.google.com/bigquery/docs/use-service-accounts.
+	ServiceAccountRef *refv1beta1.IAMServiceAccountRef `json:"serviceAccountRef,omitempty"`
+}
+
+type Parent struct {
+	// +required
+	ProjectRef *refv1beta1.ProjectRef `json:"projectRef"`
+
+	// +kubebuilder:validation:XValidation:rule="self == oldSelf",message="Location field is immutable"
+	// Immutable.
+	// +required
+	Location string `json:"location"`
+}
+
+// BigQueryDataTransferConfigStatus defines the config connector machine state of BigQueryDataTransferConfig
+type BigQueryDataTransferConfigStatus struct {
+	/* Conditions represent the latest available observations of the
+	   object's current state. */
+	Conditions []v1alpha1.Condition `json:"conditions,omitempty"`
+
+	// ObservedGeneration is the generation of the resource that was most recently observed by the Config Connector controller. If this is equal to metadata.generation, then that means that the current reported status reflects the most recent desired state of the resource.
+	ObservedGeneration *int64 `json:"observedGeneration,omitempty"`
+
+	// A unique specifier for the BigQueryDataTransferConfig resource in GCP.
+	ExternalRef *string `json:"externalRef,omitempty"`
+
+	// ObservedState is the state of the resource as most recently observed in GCP.
+	ObservedState *BigQueryDataTransferConfigObservedState `json:"observedState,omitempty"`
+}
+
+// BigQueryDataTransferConfigObservedState is the state of the BigQueryDataTransferConfig resource as most recently observed in GCP.
+// +kcc:proto=google.cloud.bigquery.datatransfer.v1.TransferConfig
+type BigQueryDataTransferConfigObservedState struct {
+	// Output only. Region in which BigQuery dataset is located.
+	DatasetRegion *string `json:"datasetRegion,omitempty"`
+
+	// Identifier. The resource name of the transfer config.
+	// Transfer config names have the form either
+	// `projects/{project_id}/locations/{region}/transferConfigs/{config_id}` or
+	// `projects/{project_id}/transferConfigs/{config_id}`,
+	// where `config_id` is usually a UUID, even though it is not
+	// guaranteed or required. The name is ignored when creating a transfer
+	// config.
+	Name *string `json:"name,omitempty"`
+
+	// Output only. Next time when data transfer will run.
+	NextRunTime *string `json:"nextRunTime,omitempty"`
+
+	// Output only. Information about the user whose credentials are used to
+	// transfer data. Populated only for `transferConfigs.get` requests. In case
+	// the user information is not available, this field will not be populated.
+	OwnerInfo *UserInfo `json:"ownerInfo,omitempty"`
+
+	// Output only. State of the most recently updated transfer run.
+	State *string `json:"state,omitempty"`
+
+	// Output only. Data transfer modification time. Ignored by server on input.
+	UpdateTime *string `json:"updateTime,omitempty"`
+
+	// Deprecated. Unique ID of the user on whose behalf transfer is done.
+	UserID *int64 `json:"userID,omitempty"`
+}
+
+// +genclient
+// +k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object
+// +kubebuilder:resource:categories=gcp
+// +kubebuilder:subresource:status
+// +kubebuilder:metadata:labels="cnrm.cloud.google.com/managed-by-kcc=true";"cnrm.cloud.google.com/system=true"
+// +kubebuilder:printcolumn:name="Age",JSONPath=".metadata.creationTimestamp",type="date"
+// +kubebuilder:printcolumn:name="Ready",JSONPath=".status.conditions[?(@.type=='Ready')].status",type="string",description="When 'True', the most recent reconcile of the resource succeeded"
+// +kubebuilder:printcolumn:name="Status",JSONPath=".status.conditions[?(@.type=='Ready')].reason",type="string",description="The reason for the value in 'Ready'"
+// +kubebuilder:printcolumn:name="Status Age",JSONPath=".status.conditions[?(@.type=='Ready')].lastTransitionTime",type="date",description="The last transition time for the value in 'Status'"
+// +kubebuilder:storageversion
+
+// BigQueryDataTransferConfig is the Schema for the BigQueryDataTransferConfig API
+// +k8s:openapi-gen=true
+type BigQueryDataTransferConfig struct {
+	metav1.TypeMeta   `json:",inline"`
+	metav1.ObjectMeta `json:"metadata,omitempty"`
+
+	// +required
+	Spec   BigQueryDataTransferConfigSpec   `json:"spec,omitempty"`
+	Status BigQueryDataTransferConfigStatus `json:"status,omitempty"`
+}
+
+// +k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object
+// BigQueryDataTransferConfigList contains a list of BigQueryDataTransferConfig
+type BigQueryDataTransferConfigList struct {
+	metav1.TypeMeta `json:",inline"`
+	metav1.ListMeta `json:"metadata,omitempty"`
+	Items           []BigQueryDataTransferConfig `json:"items"`
+}
+
+func init() {
+	SchemeBuilder.Register(&BigQueryDataTransferConfig{}, &BigQueryDataTransferConfigList{})
+}
diff
--git a/apis/bigquerydatatransfer/v1beta1/doc.go b/apis/bigquerydatatransfer/v1beta1/doc.go new file mode 100644 index 0000000000..d3654a93be --- /dev/null +++ b/apis/bigquerydatatransfer/v1beta1/doc.go @@ -0,0 +1,16 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// +kcc:proto=google.cloud.bigquery.datatransfer.v1 +package v1beta1 diff --git a/apis/bigquerydatatransfer/v1beta1/groupversion_info.go b/apis/bigquerydatatransfer/v1beta1/groupversion_info.go new file mode 100644 index 0000000000..52d310b4a8 --- /dev/null +++ b/apis/bigquerydatatransfer/v1beta1/groupversion_info.go @@ -0,0 +1,33 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// +kubebuilder:object:generate=true +// +groupName=bigquerydatatransfer.cnrm.cloud.google.com +package v1beta1 + +import ( + "k8s.io/apimachinery/pkg/runtime/schema" + "sigs.k8s.io/controller-runtime/pkg/scheme" +) + +var ( + // GroupVersion is group version used to register these objects + GroupVersion = schema.GroupVersion{Group: "bigquerydatatransfer.cnrm.cloud.google.com", Version: "v1beta1"} + + // SchemeBuilder is used to add go types to the GroupVersionKind scheme + SchemeBuilder = &scheme.Builder{GroupVersion: GroupVersion} + + // AddToScheme adds the types in this group-version to the given scheme. + AddToScheme = SchemeBuilder.AddToScheme +) diff --git a/apis/bigquerydatatransfer/v1beta1/types.generated.go b/apis/bigquerydatatransfer/v1beta1/types.generated.go new file mode 100644 index 0000000000..d21b695ff7 --- /dev/null +++ b/apis/bigquerydatatransfer/v1beta1/types.generated.go @@ -0,0 +1,49 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package v1beta1 + +// +kcc:proto=google.cloud.bigquery.datatransfer.v1.EmailPreferences +type EmailPreferences struct { + // If true, email notifications will be sent on transfer run failures. 
+ EnableFailureEmail *bool `json:"enableFailureEmail,omitempty"` +} + +// +kcc:proto=google.cloud.bigquery.datatransfer.v1.ScheduleOptions +type ScheduleOptions struct { + // If true, automatic scheduling of data transfer runs for this configuration + // will be disabled. The runs can be started on ad-hoc basis using + // StartManualTransferRuns API. When automatic scheduling is disabled, the + // TransferConfig.schedule field will be ignored. + DisableAutoScheduling *bool `json:"disableAutoScheduling,omitempty"` + + // Specifies time to start scheduling transfer runs. The first run will be + // scheduled at or after the start time according to a recurrence pattern + // defined in the schedule string. The start time can be changed at any + // moment. The time when a data transfer can be triggered manually is not + // limited by this option. + StartTime *string `json:"startTime,omitempty"` + + // Defines time to stop scheduling transfer runs. A transfer run cannot be + // scheduled at or after the end time. The end time can be changed at any + // moment. The time when a data transfer can be triggered manually is not + // limited by this option. + EndTime *string `json:"endTime,omitempty"` +} + +// +kcc:proto=google.cloud.bigquery.datatransfer.v1.UserInfo +type UserInfo struct { + // E-mail address of the user. + Email *string `json:"email,omitempty"` +} diff --git a/apis/bigquerydatatransfer/v1beta1/zz_generated.deepcopy.go b/apis/bigquerydatatransfer/v1beta1/zz_generated.deepcopy.go new file mode 100644 index 0000000000..f441dc673c --- /dev/null +++ b/apis/bigquerydatatransfer/v1beta1/zz_generated.deepcopy.go @@ -0,0 +1,362 @@ +//go:build !ignore_autogenerated + +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by controller-gen. DO NOT EDIT. + +package v1beta1 + +import ( + refsv1beta1 "github.com/GoogleCloudPlatform/k8s-config-connector/apis/refs/v1beta1" + "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/apis/k8s/v1alpha1" + runtime "k8s.io/apimachinery/pkg/runtime" +) + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *BigQueryDataTransferConfig) DeepCopyInto(out *BigQueryDataTransferConfig) { + *out = *in + out.TypeMeta = in.TypeMeta + in.ObjectMeta.DeepCopyInto(&out.ObjectMeta) + in.Spec.DeepCopyInto(&out.Spec) + in.Status.DeepCopyInto(&out.Status) +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new BigQueryDataTransferConfig. +func (in *BigQueryDataTransferConfig) DeepCopy() *BigQueryDataTransferConfig { + if in == nil { + return nil + } + out := new(BigQueryDataTransferConfig) + in.DeepCopyInto(out) + return out +} + +// DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object. 
+func (in *BigQueryDataTransferConfig) DeepCopyObject() runtime.Object { + if c := in.DeepCopy(); c != nil { + return c + } + return nil +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *BigQueryDataTransferConfigList) DeepCopyInto(out *BigQueryDataTransferConfigList) { + *out = *in + out.TypeMeta = in.TypeMeta + in.ListMeta.DeepCopyInto(&out.ListMeta) + if in.Items != nil { + in, out := &in.Items, &out.Items + *out = make([]BigQueryDataTransferConfig, len(*in)) + for i := range *in { + (*in)[i].DeepCopyInto(&(*out)[i]) + } + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new BigQueryDataTransferConfigList. +func (in *BigQueryDataTransferConfigList) DeepCopy() *BigQueryDataTransferConfigList { + if in == nil { + return nil + } + out := new(BigQueryDataTransferConfigList) + in.DeepCopyInto(out) + return out +} + +// DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object. +func (in *BigQueryDataTransferConfigList) DeepCopyObject() runtime.Object { + if c := in.DeepCopy(); c != nil { + return c + } + return nil +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *BigQueryDataTransferConfigObservedState) DeepCopyInto(out *BigQueryDataTransferConfigObservedState) { + *out = *in + if in.DatasetRegion != nil { + in, out := &in.DatasetRegion, &out.DatasetRegion + *out = new(string) + **out = **in + } + if in.Name != nil { + in, out := &in.Name, &out.Name + *out = new(string) + **out = **in + } + if in.NextRunTime != nil { + in, out := &in.NextRunTime, &out.NextRunTime + *out = new(string) + **out = **in + } + if in.OwnerInfo != nil { + in, out := &in.OwnerInfo, &out.OwnerInfo + *out = new(UserInfo) + (*in).DeepCopyInto(*out) + } + if in.State != nil { + in, out := &in.State, &out.State + *out = new(string) + **out = **in + } + if in.UpdateTime != nil { + in, out := &in.UpdateTime, &out.UpdateTime + *out = new(string) + **out = **in + } + if in.UserID != nil { + in, out := &in.UserID, &out.UserID + *out = new(int64) + **out = **in + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new BigQueryDataTransferConfigObservedState. +func (in *BigQueryDataTransferConfigObservedState) DeepCopy() *BigQueryDataTransferConfigObservedState { + if in == nil { + return nil + } + out := new(BigQueryDataTransferConfigObservedState) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
+func (in *BigQueryDataTransferConfigSpec) DeepCopyInto(out *BigQueryDataTransferConfigSpec) { + *out = *in + if in.DataRefreshWindowDays != nil { + in, out := &in.DataRefreshWindowDays, &out.DataRefreshWindowDays + *out = new(int32) + **out = **in + } + if in.DataSourceID != nil { + in, out := &in.DataSourceID, &out.DataSourceID + *out = new(string) + **out = **in + } + if in.DatasetRef != nil { + in, out := &in.DatasetRef, &out.DatasetRef + *out = new(refsv1beta1.BigQueryDatasetRef) + **out = **in + } + if in.Disabled != nil { + in, out := &in.Disabled, &out.Disabled + *out = new(bool) + **out = **in + } + if in.DisplayName != nil { + in, out := &in.DisplayName, &out.DisplayName + *out = new(string) + **out = **in + } + if in.EmailPreferences != nil { + in, out := &in.EmailPreferences, &out.EmailPreferences + *out = new(EmailPreferences) + (*in).DeepCopyInto(*out) + } + if in.EncryptionConfiguration != nil { + in, out := &in.EncryptionConfiguration, &out.EncryptionConfiguration + *out = new(EncryptionConfiguration) + (*in).DeepCopyInto(*out) + } + if in.PubSubTopicRef != nil { + in, out := &in.PubSubTopicRef, &out.PubSubTopicRef + *out = new(refsv1beta1.PubSubTopicRef) + **out = **in + } + if in.Params != nil { + in, out := &in.Params, &out.Params + *out = make(map[string]string, len(*in)) + for key, val := range *in { + (*out)[key] = val + } + } + in.Parent.DeepCopyInto(&out.Parent) + if in.ResourceID != nil { + in, out := &in.ResourceID, &out.ResourceID + *out = new(string) + **out = **in + } + if in.Schedule != nil { + in, out := &in.Schedule, &out.Schedule + *out = new(string) + **out = **in + } + if in.ScheduleOptions != nil { + in, out := &in.ScheduleOptions, &out.ScheduleOptions + *out = new(ScheduleOptions) + (*in).DeepCopyInto(*out) + } + if in.ServiceAccountRef != nil { + in, out := &in.ServiceAccountRef, &out.ServiceAccountRef + *out = new(refsv1beta1.IAMServiceAccountRef) + **out = **in + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new BigQueryDataTransferConfigSpec. +func (in *BigQueryDataTransferConfigSpec) DeepCopy() *BigQueryDataTransferConfigSpec { + if in == nil { + return nil + } + out := new(BigQueryDataTransferConfigSpec) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *BigQueryDataTransferConfigStatus) DeepCopyInto(out *BigQueryDataTransferConfigStatus) { + *out = *in + if in.Conditions != nil { + in, out := &in.Conditions, &out.Conditions + *out = make([]v1alpha1.Condition, len(*in)) + copy(*out, *in) + } + if in.ObservedGeneration != nil { + in, out := &in.ObservedGeneration, &out.ObservedGeneration + *out = new(int64) + **out = **in + } + if in.ExternalRef != nil { + in, out := &in.ExternalRef, &out.ExternalRef + *out = new(string) + **out = **in + } + if in.ObservedState != nil { + in, out := &in.ObservedState, &out.ObservedState + *out = new(BigQueryDataTransferConfigObservedState) + (*in).DeepCopyInto(*out) + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new BigQueryDataTransferConfigStatus. +func (in *BigQueryDataTransferConfigStatus) DeepCopy() *BigQueryDataTransferConfigStatus { + if in == nil { + return nil + } + out := new(BigQueryDataTransferConfigStatus) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
+func (in *EmailPreferences) DeepCopyInto(out *EmailPreferences) { + *out = *in + if in.EnableFailureEmail != nil { + in, out := &in.EnableFailureEmail, &out.EnableFailureEmail + *out = new(bool) + **out = **in + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new EmailPreferences. +func (in *EmailPreferences) DeepCopy() *EmailPreferences { + if in == nil { + return nil + } + out := new(EmailPreferences) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *EncryptionConfiguration) DeepCopyInto(out *EncryptionConfiguration) { + *out = *in + if in.KmsKeyRef != nil { + in, out := &in.KmsKeyRef, &out.KmsKeyRef + *out = new(refsv1beta1.KMSCryptoKeyRef) + **out = **in + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new EncryptionConfiguration. +func (in *EncryptionConfiguration) DeepCopy() *EncryptionConfiguration { + if in == nil { + return nil + } + out := new(EncryptionConfiguration) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *Parent) DeepCopyInto(out *Parent) { + *out = *in + if in.ProjectRef != nil { + in, out := &in.ProjectRef, &out.ProjectRef + *out = new(refsv1beta1.ProjectRef) + **out = **in + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Parent. +func (in *Parent) DeepCopy() *Parent { + if in == nil { + return nil + } + out := new(Parent) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *ScheduleOptions) DeepCopyInto(out *ScheduleOptions) { + *out = *in + if in.DisableAutoScheduling != nil { + in, out := &in.DisableAutoScheduling, &out.DisableAutoScheduling + *out = new(bool) + **out = **in + } + if in.StartTime != nil { + in, out := &in.StartTime, &out.StartTime + *out = new(string) + **out = **in + } + if in.EndTime != nil { + in, out := &in.EndTime, &out.EndTime + *out = new(string) + **out = **in + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ScheduleOptions. +func (in *ScheduleOptions) DeepCopy() *ScheduleOptions { + if in == nil { + return nil + } + out := new(ScheduleOptions) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *UserInfo) DeepCopyInto(out *UserInfo) { + *out = *in + if in.Email != nil { + in, out := &in.Email, &out.Email + *out = new(string) + **out = **in + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new UserInfo. 
+func (in *UserInfo) DeepCopy() *UserInfo { + if in == nil { + return nil + } + out := new(UserInfo) + in.DeepCopyInto(out) + return out +} diff --git a/config/crds/resources/apiextensions.k8s.io_v1_customresourcedefinition_bigquerydatatransferconfigs.bigquerydatatransfer.cnrm.cloud.google.com.yaml b/config/crds/resources/apiextensions.k8s.io_v1_customresourcedefinition_bigquerydatatransferconfigs.bigquerydatatransfer.cnrm.cloud.google.com.yaml index 29567d7a7f..e33b452320 100644 --- a/config/crds/resources/apiextensions.k8s.io_v1_customresourcedefinition_bigquerydatatransferconfigs.bigquerydatatransfer.cnrm.cloud.google.com.yaml +++ b/config/crds/resources/apiextensions.k8s.io_v1_customresourcedefinition_bigquerydatatransferconfigs.bigquerydatatransfer.cnrm.cloud.google.com.yaml @@ -406,6 +406,395 @@ spec: - spec type: object served: true + storage: false + subresources: + status: {} + - additionalPrinterColumns: + - jsonPath: .metadata.creationTimestamp + name: Age + type: date + - description: When 'True', the most recent reconcile of the resource succeeded + jsonPath: .status.conditions[?(@.type=='Ready')].status + name: Ready + type: string + - description: The reason for the value in 'Ready' + jsonPath: .status.conditions[?(@.type=='Ready')].reason + name: Status + type: string + - description: The last transition time for the value in 'Status' + jsonPath: .status.conditions[?(@.type=='Ready')].lastTransitionTime + name: Status Age + type: date + name: v1beta1 + schema: + openAPIV3Schema: + description: BigQueryDataTransferConfig is the Schema for the BigQueryDataTransferConfig + API + properties: + apiVersion: + description: 'APIVersion defines the versioned schema of this representation + of an object. Servers should convert recognized schemas to the latest + internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources' + type: string + kind: + description: 'Kind is a string value representing the REST resource this + object represents. Servers may infer this from the endpoint the client + submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds' + type: string + metadata: + type: object + spec: + description: BigQueryDataTransferConfigSpec defines the desired state + of BigQueryDataTransferConfig + properties: + dataRefreshWindowDays: + description: The number of days to look back to automatically refresh + the data. For example, if `data_refresh_window_days = 10`, then + every day BigQuery reingests data for [today-10, today-1], rather + than ingesting data for just [today-1]. Only valid if the data source + supports the feature. Set the value to 0 to use the default value. + format: int32 + type: integer + dataSourceID: + description: 'Immutable. Data source ID. This cannot be changed once + data transfer is created. The full list of available data source + IDs can be returned through an API call: https://cloud.google.com/bigquery-transfer/docs/reference/datatransfer/rest/v1/projects.locations.dataSources/list' + type: string + x-kubernetes-validations: + - message: DataSourceID field is immutable + rule: self == oldSelf + datasetRef: + description: The BigQuery target dataset id. 
+ oneOf: + - not: + required: + - external + required: + - name + - not: + anyOf: + - required: + - name + - required: + - namespace + required: + - external + properties: + external: + description: If provided must be in the format `projects/[project_id]/datasets/[dataset_id]`. + type: string + name: + description: The `metadata.name` field of a `BigQueryDataset` + resource. + type: string + namespace: + description: The `metadata.namespace` field of a `BigQueryDataset` + resource. + type: string + type: object + disabled: + description: Is this config disabled. When set to true, no runs will + be scheduled for this transfer config. + type: boolean + displayName: + description: User specified display name for the data transfer. + type: string + emailPreferences: + description: Email notifications will be sent according to these preferences + to the email address of the user who owns this transfer config. + properties: + enableFailureEmail: + description: If true, email notifications will be sent on transfer + run failures. + type: boolean + type: object + encryptionConfiguration: + description: The encryption configuration part. Currently, it is only + used for the optional KMS key name. The BigQuery service account + of your project must be granted permissions to use the key. Read + methods will return the key name applied in effect. Write methods + will apply the key if it is present, or otherwise try to apply project + default keys if it is absent. + properties: + kmsKeyRef: + description: The KMS key used for encrypting BigQuery data. + oneOf: + - not: + required: + - external + required: + - name + - not: + anyOf: + - required: + - name + - required: + - namespace + required: + - external + properties: + external: + description: A reference to an externally managed KMSCryptoKey. + Should be in the format `projects/[kms_project_id]/locations/[region]/keyRings/[key_ring_id]/cryptoKeys/[key]`. + type: string + name: + description: The `name` of a `KMSCryptoKey` resource. + type: string + namespace: + description: The `namespace` of a `KMSCryptoKey` resource. + type: string + type: object + type: object + location: + description: Immutable. + type: string + x-kubernetes-validations: + - message: Location field is immutable + rule: self == oldSelf + params: + additionalProperties: + type: string + description: 'Parameters specific to each data source. For more information + see the bq tab in the ''Setting up a data transfer'' section for + each data source. For example the parameters for Cloud Storage transfers + are listed here: https://cloud.google.com/bigquery-transfer/docs/cloud-storage-transfer#bq' + type: object + projectRef: + description: The Project that this resource belongs to. + oneOf: + - not: + required: + - external + required: + - name + - not: + anyOf: + - required: + - name + - required: + - namespace + required: + - external + properties: + external: + description: The `projectID` field of a project, when not managed + by Config Connector. + type: string + kind: + description: The kind of the Project resource; optional but must + be `Project` if provided. + type: string + name: + description: The `name` field of a `Project` resource. + type: string + namespace: + description: The `namespace` field of a `Project` resource. + type: string + type: object + pubSubTopicRef: + description: Pub/Sub topic where notifications will be sent after + transfer runs associated with this transfer config finish. 
+ oneOf: + - not: + required: + - external + required: + - name + - not: + anyOf: + - required: + - name + - required: + - namespace + required: + - external + properties: + external: + description: If provided must be in the format `projects/[project_id]/topics/[topic_id]`. + type: string + name: + description: The `metadata.name` field of a `PubSubTopic` resource. + type: string + namespace: + description: The `metadata.namespace` field of a `PubSubTopic` + resource. + type: string + type: object + resourceID: + description: Immutable. The BigQueryDataTransferConfig name. If not + given, the metadata.name will be used. + type: string + x-kubernetes-validations: + - message: ResourceID field is immutable + rule: self == oldSelf + schedule: + description: |- + Data transfer schedule. + If the data source does not support a custom schedule, this should be + empty. If it is empty, the default value for the data source will be used. + The specified times are in UTC. + Examples of valid format: + `1st,3rd monday of month 15:30`, + `every wed,fri of jan,jun 13:15`, and + `first sunday of quarter 00:00`. + See more explanation about the format here: + https://cloud.google.com/appengine/docs/flexible/python/scheduling-jobs-with-cron-yaml#the_schedule_format + + NOTE: The minimum interval time between recurring transfers depends on the + data source; refer to the documentation for your data source. + type: string + scheduleOptions: + description: Options customizing the data transfer schedule. + properties: + disableAutoScheduling: + description: If true, automatic scheduling of data transfer runs + for this configuration will be disabled. The runs can be started + on ad-hoc basis using StartManualTransferRuns API. When automatic + scheduling is disabled, the TransferConfig.schedule field will + be ignored. + type: boolean + endTime: + description: Defines time to stop scheduling transfer runs. A + transfer run cannot be scheduled at or after the end time. The + end time can be changed at any moment. The time when a data + transfer can be triggered manually is not limited by this option. + type: string + startTime: + description: Specifies time to start scheduling transfer runs. + The first run will be scheduled at or after the start time according + to a recurrence pattern defined in the schedule string. The + start time can be changed at any moment. The time when a data + transfer can be triggered manually is not limited by this option. + type: string + type: object + serviceAccountRef: + description: Service account email. If this field is set, the transfer + config will be created with this service account's credentials. + It requires that the requesting user calling this API has permissions + to act as this service account. Note that not all data sources support + service account credentials when creating a transfer config. For + the latest list of data sources, please refer to https://cloud.google.com/bigquery/docs/use-service-accounts. + oneOf: + - not: + required: + - external + required: + - name + - not: + anyOf: + - required: + - name + - required: + - namespace + required: + - external + properties: + external: + description: The `email` field of an `IAMServiceAccount` resource. + type: string + name: + description: 'Name of the referent. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names' + type: string + namespace: + description: 'Namespace of the referent. 
More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/namespaces/' + type: string + type: object + required: + - dataSourceID + - datasetRef + - location + - params + - projectRef + type: object + status: + description: BigQueryDataTransferConfigStatus defines the config connector + machine state of BigQueryDataTransferConfig + properties: + conditions: + description: Conditions represent the latest available observations + of the object's current state. + items: + properties: + lastTransitionTime: + description: Last time the condition transitioned from one status + to another. + type: string + message: + description: Human-readable message indicating details about + last transition. + type: string + reason: + description: Unique, one-word, CamelCase reason for the condition's + last transition. + type: string + status: + description: Status is the status of the condition. Can be True, + False, Unknown. + type: string + type: + description: Type is the type of the condition. + type: string + type: object + type: array + externalRef: + description: A unique specifier for the BigQueryDataTransferConfig + resource in GCP. + type: string + observedGeneration: + description: ObservedGeneration is the generation of the resource + that was most recently observed by the Config Connector controller. + If this is equal to metadata.generation, then that means that the + current reported status reflects the most recent desired state of + the resource. + format: int64 + type: integer + observedState: + description: ObservedState is the state of the resource as most recently + observed in GCP. + properties: + datasetRegion: + description: Output only. Region in which BigQuery dataset is + located. + type: string + name: + description: Identifier. The resource name of the transfer config. + Transfer config names have the form either `projects/{project_id}/locations/{region}/transferConfigs/{config_id}` + or `projects/{project_id}/transferConfigs/{config_id}`, where + `config_id` is usually a UUID, even though it is not guaranteed + or required. The name is ignored when creating a transfer config. + type: string + nextRunTime: + description: Output only. Next time when data transfer will run. + type: string + ownerInfo: + description: Output only. Information about the user whose credentials + are used to transfer data. Populated only for `transferConfigs.get` + requests. In case the user information is not available, this + field will not be populated. + properties: + email: + description: E-mail address of the user. + type: string + type: object + state: + description: Output only. State of the most recently updated transfer + run. + type: string + updateTime: + description: Output only. Data transfer modification time. Ignored + by server on input. + type: string + userID: + description: Deprecated. Unique ID of the user on whose behalf + transfer is done. 
+ format: int64 + type: integer + type: object + type: object + required: + - spec + type: object + served: true storage: true subresources: status: {} diff --git a/config/samples/resources/bigquerydatatransferconfig/bigquerydatatransferconfig-salesforce/bigquery_v1beta1_bigquerydataset.yaml b/config/samples/resources/bigquerydatatransferconfig/bigquerydatatransferconfig-salesforce/bigquery_v1beta1_bigquerydataset.yaml new file mode 100644 index 0000000000..df478adf76 --- /dev/null +++ b/config/samples/resources/bigquerydatatransferconfig/bigquerydatatransferconfig-salesforce/bigquery_v1beta1_bigquerydataset.yaml @@ -0,0 +1,21 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +apiVersion: bigquery.cnrm.cloud.google.com/v1beta1 +kind: BigQueryDataset +metadata: + name: bigquerydatatransferconfigdepsalesforce +spec: + friendlyName: bigquerydatatransferconfig-dep-salesforce + location: us-central1 diff --git a/config/samples/resources/bigquerydatatransferconfig/bigquerydatatransferconfig-salesforce/bigquerydatatransfer_v1beta1_bigquerydatatransferconfig.yaml b/config/samples/resources/bigquerydatatransferconfig/bigquerydatatransferconfig-salesforce/bigquerydatatransfer_v1beta1_bigquerydatatransferconfig.yaml new file mode 100644 index 0000000000..7673609b25 --- /dev/null +++ b/config/samples/resources/bigquerydatatransferconfig/bigquerydatatransferconfig-salesforce/bigquerydatatransfer_v1beta1_bigquerydatatransferconfig.yaml @@ -0,0 +1,32 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +apiVersion: bigquerydatatransfer.cnrm.cloud.google.com/v1beta1 +kind: BigQueryDataTransferConfig +metadata: + name: bigquerydatatransferconfig-sample-salesforce +spec: + projectRef: + # Replace ${PROJECT_ID?} with your project ID. 
+ external: ${PROJECT_ID?} + location: us-central1 + displayName: "example of big query data transfer config" + dataSourceID: "salesforce" + datasetRef: + name: bigquerydatatransferconfigdepsalesforce + params: + "connector.authentication.oauth.clientId": "client-id" + "connector.authentication.oauth.clientSecret": "client-secret" + "connector.authentication.oauth.myDomain": "MyDomainName" + "assets": "asset-a" diff --git a/config/samples/resources/bigquerydatatransferconfig/bigquerydatatransferconfig-scheduledquery/bigquery_v1beta1_bigquerydataset.yaml b/config/samples/resources/bigquerydatatransferconfig/bigquerydatatransferconfig-scheduledquery/bigquery_v1beta1_bigquerydataset.yaml new file mode 100644 index 0000000000..4c3fa21205 --- /dev/null +++ b/config/samples/resources/bigquerydatatransferconfig/bigquerydatatransferconfig-scheduledquery/bigquery_v1beta1_bigquerydataset.yaml @@ -0,0 +1,21 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +apiVersion: bigquery.cnrm.cloud.google.com/v1beta1 +kind: BigQueryDataset +metadata: + name: bigquerydatatransferconfigdepscheduledquery +spec: + friendlyName: bigquerydatatransferconfig-dep-scheduledquery + location: us-central1 diff --git a/config/samples/resources/bigquerydatatransferconfig/bigquerydatatransferconfig-scheduledquery/bigquerydatatransfer_v1beta1_bigquerydatatransferconfig.yaml b/config/samples/resources/bigquerydatatransferconfig/bigquerydatatransferconfig-scheduledquery/bigquerydatatransfer_v1beta1_bigquerydatatransferconfig.yaml new file mode 100644 index 0000000000..cc08144be4 --- /dev/null +++ b/config/samples/resources/bigquerydatatransferconfig/bigquerydatatransferconfig-scheduledquery/bigquerydatatransfer_v1beta1_bigquerydatatransferconfig.yaml @@ -0,0 +1,34 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +apiVersion: bigquerydatatransfer.cnrm.cloud.google.com/v1beta1 +kind: BigQueryDataTransferConfig +metadata: + name: bigquerydatatransferconfig-sample-scheduledquery +spec: + projectRef: + # Replace ${PROJECT_ID?} with your project ID. 
+ external: ${PROJECT_ID?} + location: us-central1 + displayName: "example of scheduled query" + dataSourceID: "scheduled_query" + datasetRef: + name: bigquerydatatransferconfigdepscheduledquery + params: + destination_table_name_template: "my_table" + write_disposition: "WRITE_APPEND" + query: "SELECT name FROM tabl WHERE x = 'y'" + schedule: "first sunday of quarter 00:00" + serviceAccountRef: + name: gsa-dep-scheduledquery diff --git a/config/samples/resources/bigquerydatatransferconfig/bigquerydatatransferconfig-scheduledquery/iam_v1beta1_iamserviceaccount.yaml b/config/samples/resources/bigquerydatatransferconfig/bigquerydatatransferconfig-scheduledquery/iam_v1beta1_iamserviceaccount.yaml new file mode 100644 index 0000000000..0f2f4fe308 --- /dev/null +++ b/config/samples/resources/bigquerydatatransferconfig/bigquerydatatransferconfig-scheduledquery/iam_v1beta1_iamserviceaccount.yaml @@ -0,0 +1,21 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +apiVersion: iam.cnrm.cloud.google.com/v1beta1 +kind: IAMServiceAccount +metadata: + annotations: + # Replace ${PROJECT_ID?} with your project ID. + cnrm.cloud.google.com/project-id: "${PROJECT_ID?}" + name: gsa-dep-scheduledquery diff --git a/config/servicemappings/bigquerydatatransfer.yaml b/config/servicemappings/bigquerydatatransfer.yaml new file mode 100644 index 0000000000..cf2014a15a --- /dev/null +++ b/config/servicemappings/bigquerydatatransfer.yaml @@ -0,0 +1,27 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +apiVersion: core.cnrm.cloud.google.com/v1alpha1 +kind: ServiceMapping +metadata: + name: bigquerydatatransfer.cnrm.cloud.google.com + namespace: cnrm-system +spec: + name: BigQueryDataTransfer + version: v1beta1 + serviceHostName: "bigquerydatatransfer.googleapis.com" + resources: + - name: google_bigquery_data_transfer_config + kind: BigQueryDataTransferConfig + direct: true diff --git a/pkg/clients/generated/apis/bigquerydatatransfer/v1alpha1/bigquerydatatransferconfig_types.go b/pkg/clients/generated/apis/bigquerydatatransfer/v1beta1/bigquerydatatransferconfig_types.go similarity index 99% rename from pkg/clients/generated/apis/bigquerydatatransfer/v1alpha1/bigquerydatatransferconfig_types.go rename to pkg/clients/generated/apis/bigquerydatatransfer/v1beta1/bigquerydatatransferconfig_types.go index 7303dc0550..56bbe81468 100644 --- a/pkg/clients/generated/apis/bigquerydatatransfer/v1alpha1/bigquerydatatransferconfig_types.go +++ b/pkg/clients/generated/apis/bigquerydatatransfer/v1beta1/bigquerydatatransferconfig_types.go @@ -28,7 +28,7 @@ // that future versions of the go-client may include breaking changes. // Please try it out and give us feedback! -package v1alpha1 +package v1beta1 import ( "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/apis/k8s/v1alpha1" diff --git a/pkg/clients/generated/apis/bigquerydatatransfer/v1alpha1/doc.go b/pkg/clients/generated/apis/bigquerydatatransfer/v1beta1/doc.go similarity index 92% rename from pkg/clients/generated/apis/bigquerydatatransfer/v1alpha1/doc.go rename to pkg/clients/generated/apis/bigquerydatatransfer/v1beta1/doc.go index 49f5044ce1..39a5735f31 100644 --- a/pkg/clients/generated/apis/bigquerydatatransfer/v1alpha1/doc.go +++ b/pkg/clients/generated/apis/bigquerydatatransfer/v1beta1/doc.go @@ -28,11 +28,11 @@ // that future versions of the go-client may include breaking changes. // Please try it out and give us feedback! -// Package v1alpha1 contains API Schema definitions for the bigquerydatatransfer v1alpha1 API group. +// Package v1beta1 contains API Schema definitions for the bigquerydatatransfer v1beta1 API group. // +k8s:openapi-gen=true // +k8s:deepcopy-gen=package,register // +k8s:conversion-gen=github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/pkg/apis/bigquerydatatransfer // +k8s:defaulter-gen=TypeMeta // +groupName=bigquerydatatransfer.cnrm.cloud.google.com -package v1alpha1 +package v1beta1 diff --git a/pkg/clients/generated/apis/bigquerydatatransfer/v1alpha1/register.go b/pkg/clients/generated/apis/bigquerydatatransfer/v1beta1/register.go similarity index 93% rename from pkg/clients/generated/apis/bigquerydatatransfer/v1alpha1/register.go rename to pkg/clients/generated/apis/bigquerydatatransfer/v1beta1/register.go index b70137f930..6a51033d0f 100644 --- a/pkg/clients/generated/apis/bigquerydatatransfer/v1alpha1/register.go +++ b/pkg/clients/generated/apis/bigquerydatatransfer/v1beta1/register.go @@ -28,13 +28,13 @@ // that future versions of the go-client may include breaking changes. // Please try it out and give us feedback! -// Package v1alpha1 contains API Schema definitions for the bigquerydatatransfer v1alpha1 API group. +// Package v1beta1 contains API Schema definitions for the bigquerydatatransfer v1beta1 API group. 
// +k8s:openapi-gen=true // +k8s:deepcopy-gen=package,register // +k8s:conversion-gen=github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/pkg/apis/bigquerydatatransfer // +k8s:defaulter-gen=TypeMeta // +groupName=bigquerydatatransfer.cnrm.cloud.google.com -package v1alpha1 +package v1beta1 import ( "reflect" @@ -45,7 +45,7 @@ import ( var ( // SchemeGroupVersion is the group version used to register these objects. - SchemeGroupVersion = schema.GroupVersion{Group: "bigquerydatatransfer.cnrm.cloud.google.com", Version: "v1alpha1"} + SchemeGroupVersion = schema.GroupVersion{Group: "bigquerydatatransfer.cnrm.cloud.google.com", Version: "v1beta1"} // SchemeBuilder is used to add go types to the GroupVersionKind scheme. SchemeBuilder = &scheme.Builder{GroupVersion: SchemeGroupVersion} diff --git a/pkg/clients/generated/apis/bigquerydatatransfer/v1alpha1/zz_generated.deepcopy.go b/pkg/clients/generated/apis/bigquerydatatransfer/v1beta1/zz_generated.deepcopy.go similarity index 97% rename from pkg/clients/generated/apis/bigquerydatatransfer/v1alpha1/zz_generated.deepcopy.go rename to pkg/clients/generated/apis/bigquerydatatransfer/v1beta1/zz_generated.deepcopy.go index 4b64999896..9dd301035f 100644 --- a/pkg/clients/generated/apis/bigquerydatatransfer/v1alpha1/zz_generated.deepcopy.go +++ b/pkg/clients/generated/apis/bigquerydatatransfer/v1beta1/zz_generated.deepcopy.go @@ -22,10 +22,10 @@ // Code generated by deepcopy-gen. DO NOT EDIT. -package v1alpha1 +package v1beta1 import ( - k8sv1alpha1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/apis/k8s/v1alpha1" + v1alpha1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/apis/k8s/v1alpha1" runtime "k8s.io/apimachinery/pkg/runtime" ) @@ -129,7 +129,7 @@ func (in *BigQueryDataTransferConfigSpec) DeepCopyInto(out *BigQueryDataTransfer out.ProjectRef = in.ProjectRef if in.PubSubTopicRef != nil { in, out := &in.PubSubTopicRef, &out.PubSubTopicRef - *out = new(k8sv1alpha1.ResourceRef) + *out = new(v1alpha1.ResourceRef) **out = **in } if in.ResourceID != nil { @@ -149,7 +149,7 @@ func (in *BigQueryDataTransferConfigSpec) DeepCopyInto(out *BigQueryDataTransfer } if in.ServiceAccountRef != nil { in, out := &in.ServiceAccountRef, &out.ServiceAccountRef - *out = new(k8sv1alpha1.ResourceRef) + *out = new(v1alpha1.ResourceRef) **out = **in } return @@ -170,7 +170,7 @@ func (in *BigQueryDataTransferConfigStatus) DeepCopyInto(out *BigQueryDataTransf *out = *in if in.Conditions != nil { in, out := &in.Conditions, &out.Conditions - *out = make([]k8sv1alpha1.Condition, len(*in)) + *out = make([]v1alpha1.Condition, len(*in)) copy(*out, *in) } if in.ExternalRef != nil { @@ -227,7 +227,7 @@ func (in *ConfigEncryptionConfiguration) DeepCopyInto(out *ConfigEncryptionConfi *out = *in if in.KmsKeyRef != nil { in, out := &in.KmsKeyRef, &out.KmsKeyRef - *out = new(k8sv1alpha1.ResourceRef) + *out = new(v1alpha1.ResourceRef) **out = **in } return diff --git a/pkg/clients/generated/client/clientset/versioned/clientset.go b/pkg/clients/generated/client/clientset/versioned/clientset.go index 167f411710..7a3b84e330 100644 --- a/pkg/clients/generated/client/clientset/versioned/clientset.go +++ b/pkg/clients/generated/client/clientset/versioned/clientset.go @@ -40,7 +40,7 @@ import ( bigqueryanalyticshubv1alpha1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/client/clientset/versioned/typed/bigqueryanalyticshub/v1alpha1" bigqueryconnectionv1alpha1 
"github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/client/clientset/versioned/typed/bigqueryconnection/v1alpha1" bigquerydatapolicyv1alpha1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/client/clientset/versioned/typed/bigquerydatapolicy/v1alpha1" - bigquerydatatransferv1alpha1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/client/clientset/versioned/typed/bigquerydatatransfer/v1alpha1" + bigquerydatatransferv1beta1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/client/clientset/versioned/typed/bigquerydatatransfer/v1beta1" bigqueryreservationv1alpha1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/client/clientset/versioned/typed/bigqueryreservation/v1alpha1" bigtablev1beta1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/client/clientset/versioned/typed/bigtable/v1beta1" billingbudgetsv1beta1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/client/clientset/versioned/typed/billingbudgets/v1beta1" @@ -168,7 +168,7 @@ type Interface interface { BigqueryanalyticshubV1alpha1() bigqueryanalyticshubv1alpha1.BigqueryanalyticshubV1alpha1Interface BigqueryconnectionV1alpha1() bigqueryconnectionv1alpha1.BigqueryconnectionV1alpha1Interface BigquerydatapolicyV1alpha1() bigquerydatapolicyv1alpha1.BigquerydatapolicyV1alpha1Interface - BigquerydatatransferV1alpha1() bigquerydatatransferv1alpha1.BigquerydatatransferV1alpha1Interface + BigquerydatatransferV1beta1() bigquerydatatransferv1beta1.BigquerydatatransferV1beta1Interface BigqueryreservationV1alpha1() bigqueryreservationv1alpha1.BigqueryreservationV1alpha1Interface BigtableV1beta1() bigtablev1beta1.BigtableV1beta1Interface BillingbudgetsV1beta1() billingbudgetsv1beta1.BillingbudgetsV1beta1Interface @@ -294,7 +294,7 @@ type Clientset struct { bigqueryanalyticshubV1alpha1 *bigqueryanalyticshubv1alpha1.BigqueryanalyticshubV1alpha1Client bigqueryconnectionV1alpha1 *bigqueryconnectionv1alpha1.BigqueryconnectionV1alpha1Client bigquerydatapolicyV1alpha1 *bigquerydatapolicyv1alpha1.BigquerydatapolicyV1alpha1Client - bigquerydatatransferV1alpha1 *bigquerydatatransferv1alpha1.BigquerydatatransferV1alpha1Client + bigquerydatatransferV1beta1 *bigquerydatatransferv1beta1.BigquerydatatransferV1beta1Client bigqueryreservationV1alpha1 *bigqueryreservationv1alpha1.BigqueryreservationV1alpha1Client bigtableV1beta1 *bigtablev1beta1.BigtableV1beta1Client billingbudgetsV1beta1 *billingbudgetsv1beta1.BillingbudgetsV1beta1Client @@ -477,9 +477,9 @@ func (c *Clientset) BigquerydatapolicyV1alpha1() bigquerydatapolicyv1alpha1.Bigq return c.bigquerydatapolicyV1alpha1 } -// BigquerydatatransferV1alpha1 retrieves the BigquerydatatransferV1alpha1Client -func (c *Clientset) BigquerydatatransferV1alpha1() bigquerydatatransferv1alpha1.BigquerydatatransferV1alpha1Interface { - return c.bigquerydatatransferV1alpha1 +// BigquerydatatransferV1beta1 retrieves the BigquerydatatransferV1beta1Client +func (c *Clientset) BigquerydatatransferV1beta1() bigquerydatatransferv1beta1.BigquerydatatransferV1beta1Interface { + return c.bigquerydatatransferV1beta1 } // BigqueryreservationV1alpha1 retrieves the BigqueryreservationV1alpha1Client @@ -1111,7 +1111,7 @@ func NewForConfigAndClient(c *rest.Config, httpClient *http.Client) (*Clientset, if err != nil { return nil, err } - cs.bigquerydatatransferV1alpha1, err = bigquerydatatransferv1alpha1.NewForConfigAndClient(&configShallowCopy, httpClient) + 
cs.bigquerydatatransferV1beta1, err = bigquerydatatransferv1beta1.NewForConfigAndClient(&configShallowCopy, httpClient) if err != nil { return nil, err } @@ -1571,7 +1571,7 @@ func New(c rest.Interface) *Clientset { cs.bigqueryanalyticshubV1alpha1 = bigqueryanalyticshubv1alpha1.New(c) cs.bigqueryconnectionV1alpha1 = bigqueryconnectionv1alpha1.New(c) cs.bigquerydatapolicyV1alpha1 = bigquerydatapolicyv1alpha1.New(c) - cs.bigquerydatatransferV1alpha1 = bigquerydatatransferv1alpha1.New(c) + cs.bigquerydatatransferV1beta1 = bigquerydatatransferv1beta1.New(c) cs.bigqueryreservationV1alpha1 = bigqueryreservationv1alpha1.New(c) cs.bigtableV1beta1 = bigtablev1beta1.New(c) cs.billingbudgetsV1beta1 = billingbudgetsv1beta1.New(c) diff --git a/pkg/clients/generated/client/clientset/versioned/fake/clientset_generated.go b/pkg/clients/generated/client/clientset/versioned/fake/clientset_generated.go index 98ef76b958..9e28754051 100644 --- a/pkg/clients/generated/client/clientset/versioned/fake/clientset_generated.go +++ b/pkg/clients/generated/client/clientset/versioned/fake/clientset_generated.go @@ -53,8 +53,8 @@ import ( fakebigqueryconnectionv1alpha1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/client/clientset/versioned/typed/bigqueryconnection/v1alpha1/fake" bigquerydatapolicyv1alpha1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/client/clientset/versioned/typed/bigquerydatapolicy/v1alpha1" fakebigquerydatapolicyv1alpha1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/client/clientset/versioned/typed/bigquerydatapolicy/v1alpha1/fake" - bigquerydatatransferv1alpha1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/client/clientset/versioned/typed/bigquerydatatransfer/v1alpha1" - fakebigquerydatatransferv1alpha1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/client/clientset/versioned/typed/bigquerydatatransfer/v1alpha1/fake" + bigquerydatatransferv1beta1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/client/clientset/versioned/typed/bigquerydatatransfer/v1beta1" + fakebigquerydatatransferv1beta1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/client/clientset/versioned/typed/bigquerydatatransfer/v1beta1/fake" bigqueryreservationv1alpha1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/client/clientset/versioned/typed/bigqueryreservation/v1alpha1" fakebigqueryreservationv1alpha1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/client/clientset/versioned/typed/bigqueryreservation/v1alpha1/fake" bigtablev1beta1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/client/clientset/versioned/typed/bigtable/v1beta1" @@ -397,9 +397,9 @@ func (c *Clientset) BigquerydatapolicyV1alpha1() bigquerydatapolicyv1alpha1.Bigq return &fakebigquerydatapolicyv1alpha1.FakeBigquerydatapolicyV1alpha1{Fake: &c.Fake} } -// BigquerydatatransferV1alpha1 retrieves the BigquerydatatransferV1alpha1Client -func (c *Clientset) BigquerydatatransferV1alpha1() bigquerydatatransferv1alpha1.BigquerydatatransferV1alpha1Interface { - return &fakebigquerydatatransferv1alpha1.FakeBigquerydatatransferV1alpha1{Fake: &c.Fake} +// BigquerydatatransferV1beta1 retrieves the BigquerydatatransferV1beta1Client +func (c *Clientset) BigquerydatatransferV1beta1() bigquerydatatransferv1beta1.BigquerydatatransferV1beta1Interface { + return 
&fakebigquerydatatransferv1beta1.FakeBigquerydatatransferV1beta1{Fake: &c.Fake} } // BigqueryreservationV1alpha1 retrieves the BigqueryreservationV1alpha1Client diff --git a/pkg/clients/generated/client/clientset/versioned/fake/register.go b/pkg/clients/generated/client/clientset/versioned/fake/register.go index 2bbee78fc0..c64bc89a7e 100644 --- a/pkg/clients/generated/client/clientset/versioned/fake/register.go +++ b/pkg/clients/generated/client/clientset/versioned/fake/register.go @@ -37,7 +37,7 @@ import ( bigqueryanalyticshubv1alpha1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/apis/bigqueryanalyticshub/v1alpha1" bigqueryconnectionv1alpha1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/apis/bigqueryconnection/v1alpha1" bigquerydatapolicyv1alpha1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/apis/bigquerydatapolicy/v1alpha1" - bigquerydatatransferv1alpha1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/apis/bigquerydatatransfer/v1alpha1" + bigquerydatatransferv1beta1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/apis/bigquerydatatransfer/v1beta1" bigqueryreservationv1alpha1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/apis/bigqueryreservation/v1alpha1" bigtablev1beta1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/apis/bigtable/v1beta1" billingbudgetsv1beta1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/apis/billingbudgets/v1beta1" @@ -169,7 +169,7 @@ var localSchemeBuilder = runtime.SchemeBuilder{ bigqueryanalyticshubv1alpha1.AddToScheme, bigqueryconnectionv1alpha1.AddToScheme, bigquerydatapolicyv1alpha1.AddToScheme, - bigquerydatatransferv1alpha1.AddToScheme, + bigquerydatatransferv1beta1.AddToScheme, bigqueryreservationv1alpha1.AddToScheme, bigtablev1beta1.AddToScheme, billingbudgetsv1beta1.AddToScheme, diff --git a/pkg/clients/generated/client/clientset/versioned/scheme/register.go b/pkg/clients/generated/client/clientset/versioned/scheme/register.go index 64556c0e74..f6844223fe 100644 --- a/pkg/clients/generated/client/clientset/versioned/scheme/register.go +++ b/pkg/clients/generated/client/clientset/versioned/scheme/register.go @@ -37,7 +37,7 @@ import ( bigqueryanalyticshubv1alpha1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/apis/bigqueryanalyticshub/v1alpha1" bigqueryconnectionv1alpha1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/apis/bigqueryconnection/v1alpha1" bigquerydatapolicyv1alpha1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/apis/bigquerydatapolicy/v1alpha1" - bigquerydatatransferv1alpha1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/apis/bigquerydatatransfer/v1alpha1" + bigquerydatatransferv1beta1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/apis/bigquerydatatransfer/v1beta1" bigqueryreservationv1alpha1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/apis/bigqueryreservation/v1alpha1" bigtablev1beta1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/apis/bigtable/v1beta1" billingbudgetsv1beta1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/apis/billingbudgets/v1beta1" @@ -169,7 +169,7 @@ var localSchemeBuilder = runtime.SchemeBuilder{ bigqueryanalyticshubv1alpha1.AddToScheme, bigqueryconnectionv1alpha1.AddToScheme, 
bigquerydatapolicyv1alpha1.AddToScheme, - bigquerydatatransferv1alpha1.AddToScheme, + bigquerydatatransferv1beta1.AddToScheme, bigqueryreservationv1alpha1.AddToScheme, bigtablev1beta1.AddToScheme, billingbudgetsv1beta1.AddToScheme, diff --git a/pkg/clients/generated/client/clientset/versioned/typed/bigquerydatatransfer/v1alpha1/bigquerydatatransfer_client.go b/pkg/clients/generated/client/clientset/versioned/typed/bigquerydatatransfer/v1beta1/bigquerydatatransfer_client.go similarity index 64% rename from pkg/clients/generated/client/clientset/versioned/typed/bigquerydatatransfer/v1alpha1/bigquerydatatransfer_client.go rename to pkg/clients/generated/client/clientset/versioned/typed/bigquerydatatransfer/v1beta1/bigquerydatatransfer_client.go index f5b7d0e5af..a668c85352 100644 --- a/pkg/clients/generated/client/clientset/versioned/typed/bigquerydatatransfer/v1alpha1/bigquerydatatransfer_client.go +++ b/pkg/clients/generated/client/clientset/versioned/typed/bigquerydatatransfer/v1beta1/bigquerydatatransfer_client.go @@ -19,34 +19,34 @@ // Code generated by client-gen. DO NOT EDIT. -package v1alpha1 +package v1beta1 import ( "net/http" - v1alpha1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/apis/bigquerydatatransfer/v1alpha1" + v1beta1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/apis/bigquerydatatransfer/v1beta1" "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/client/clientset/versioned/scheme" rest "k8s.io/client-go/rest" ) -type BigquerydatatransferV1alpha1Interface interface { +type BigquerydatatransferV1beta1Interface interface { RESTClient() rest.Interface BigQueryDataTransferConfigsGetter } -// BigquerydatatransferV1alpha1Client is used to interact with features provided by the bigquerydatatransfer.cnrm.cloud.google.com group. -type BigquerydatatransferV1alpha1Client struct { +// BigquerydatatransferV1beta1Client is used to interact with features provided by the bigquerydatatransfer.cnrm.cloud.google.com group. +type BigquerydatatransferV1beta1Client struct { restClient rest.Interface } -func (c *BigquerydatatransferV1alpha1Client) BigQueryDataTransferConfigs(namespace string) BigQueryDataTransferConfigInterface { +func (c *BigquerydatatransferV1beta1Client) BigQueryDataTransferConfigs(namespace string) BigQueryDataTransferConfigInterface { return newBigQueryDataTransferConfigs(c, namespace) } -// NewForConfig creates a new BigquerydatatransferV1alpha1Client for the given config. +// NewForConfig creates a new BigquerydatatransferV1beta1Client for the given config. // NewForConfig is equivalent to NewForConfigAndClient(c, httpClient), // where httpClient was generated with rest.HTTPClientFor(c). -func NewForConfig(c *rest.Config) (*BigquerydatatransferV1alpha1Client, error) { +func NewForConfig(c *rest.Config) (*BigquerydatatransferV1beta1Client, error) { config := *c if err := setConfigDefaults(&config); err != nil { return nil, err @@ -58,9 +58,9 @@ func NewForConfig(c *rest.Config) (*BigquerydatatransferV1alpha1Client, error) { return NewForConfigAndClient(&config, httpClient) } -// NewForConfigAndClient creates a new BigquerydatatransferV1alpha1Client for the given config and http client. +// NewForConfigAndClient creates a new BigquerydatatransferV1beta1Client for the given config and http client. // Note the http client provided takes precedence over the configured transport values. 
-func NewForConfigAndClient(c *rest.Config, h *http.Client) (*BigquerydatatransferV1alpha1Client, error) { +func NewForConfigAndClient(c *rest.Config, h *http.Client) (*BigquerydatatransferV1beta1Client, error) { config := *c if err := setConfigDefaults(&config); err != nil { return nil, err @@ -69,12 +69,12 @@ func NewForConfigAndClient(c *rest.Config, h *http.Client) (*Bigquerydatatransfe if err != nil { return nil, err } - return &BigquerydatatransferV1alpha1Client{client}, nil + return &BigquerydatatransferV1beta1Client{client}, nil } -// NewForConfigOrDie creates a new BigquerydatatransferV1alpha1Client for the given config and +// NewForConfigOrDie creates a new BigquerydatatransferV1beta1Client for the given config and // panics if there is an error in the config. -func NewForConfigOrDie(c *rest.Config) *BigquerydatatransferV1alpha1Client { +func NewForConfigOrDie(c *rest.Config) *BigquerydatatransferV1beta1Client { client, err := NewForConfig(c) if err != nil { panic(err) @@ -82,13 +82,13 @@ func NewForConfigOrDie(c *rest.Config) *BigquerydatatransferV1alpha1Client { return client } -// New creates a new BigquerydatatransferV1alpha1Client for the given RESTClient. -func New(c rest.Interface) *BigquerydatatransferV1alpha1Client { - return &BigquerydatatransferV1alpha1Client{c} +// New creates a new BigquerydatatransferV1beta1Client for the given RESTClient. +func New(c rest.Interface) *BigquerydatatransferV1beta1Client { + return &BigquerydatatransferV1beta1Client{c} } func setConfigDefaults(config *rest.Config) error { - gv := v1alpha1.SchemeGroupVersion + gv := v1beta1.SchemeGroupVersion config.GroupVersion = &gv config.APIPath = "/apis" config.NegotiatedSerializer = scheme.Codecs.WithoutConversion() @@ -102,7 +102,7 @@ func setConfigDefaults(config *rest.Config) error { // RESTClient returns a RESTClient that is used to communicate // with API server by this client implementation. -func (c *BigquerydatatransferV1alpha1Client) RESTClient() rest.Interface { +func (c *BigquerydatatransferV1beta1Client) RESTClient() rest.Interface { if c == nil { return nil } diff --git a/pkg/clients/generated/client/clientset/versioned/typed/bigquerydatatransfer/v1alpha1/bigquerydatatransferconfig.go b/pkg/clients/generated/client/clientset/versioned/typed/bigquerydatatransfer/v1beta1/bigquerydatatransferconfig.go similarity index 77% rename from pkg/clients/generated/client/clientset/versioned/typed/bigquerydatatransfer/v1alpha1/bigquerydatatransferconfig.go rename to pkg/clients/generated/client/clientset/versioned/typed/bigquerydatatransfer/v1beta1/bigquerydatatransferconfig.go index 6e645afb90..17fe9324d1 100644 --- a/pkg/clients/generated/client/clientset/versioned/typed/bigquerydatatransfer/v1alpha1/bigquerydatatransferconfig.go +++ b/pkg/clients/generated/client/clientset/versioned/typed/bigquerydatatransfer/v1beta1/bigquerydatatransferconfig.go @@ -19,13 +19,13 @@ // Code generated by client-gen. DO NOT EDIT. 
-package v1alpha1 +package v1beta1 import ( "context" "time" - v1alpha1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/apis/bigquerydatatransfer/v1alpha1" + v1beta1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/apis/bigquerydatatransfer/v1beta1" scheme "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/client/clientset/versioned/scheme" v1 "k8s.io/apimachinery/pkg/apis/meta/v1" types "k8s.io/apimachinery/pkg/types" @@ -41,15 +41,15 @@ type BigQueryDataTransferConfigsGetter interface { // BigQueryDataTransferConfigInterface has methods to work with BigQueryDataTransferConfig resources. type BigQueryDataTransferConfigInterface interface { - Create(ctx context.Context, bigQueryDataTransferConfig *v1alpha1.BigQueryDataTransferConfig, opts v1.CreateOptions) (*v1alpha1.BigQueryDataTransferConfig, error) - Update(ctx context.Context, bigQueryDataTransferConfig *v1alpha1.BigQueryDataTransferConfig, opts v1.UpdateOptions) (*v1alpha1.BigQueryDataTransferConfig, error) - UpdateStatus(ctx context.Context, bigQueryDataTransferConfig *v1alpha1.BigQueryDataTransferConfig, opts v1.UpdateOptions) (*v1alpha1.BigQueryDataTransferConfig, error) + Create(ctx context.Context, bigQueryDataTransferConfig *v1beta1.BigQueryDataTransferConfig, opts v1.CreateOptions) (*v1beta1.BigQueryDataTransferConfig, error) + Update(ctx context.Context, bigQueryDataTransferConfig *v1beta1.BigQueryDataTransferConfig, opts v1.UpdateOptions) (*v1beta1.BigQueryDataTransferConfig, error) + UpdateStatus(ctx context.Context, bigQueryDataTransferConfig *v1beta1.BigQueryDataTransferConfig, opts v1.UpdateOptions) (*v1beta1.BigQueryDataTransferConfig, error) Delete(ctx context.Context, name string, opts v1.DeleteOptions) error DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error - Get(ctx context.Context, name string, opts v1.GetOptions) (*v1alpha1.BigQueryDataTransferConfig, error) - List(ctx context.Context, opts v1.ListOptions) (*v1alpha1.BigQueryDataTransferConfigList, error) + Get(ctx context.Context, name string, opts v1.GetOptions) (*v1beta1.BigQueryDataTransferConfig, error) + List(ctx context.Context, opts v1.ListOptions) (*v1beta1.BigQueryDataTransferConfigList, error) Watch(ctx context.Context, opts v1.ListOptions) (watch.Interface, error) - Patch(ctx context.Context, name string, pt types.PatchType, data []byte, opts v1.PatchOptions, subresources ...string) (result *v1alpha1.BigQueryDataTransferConfig, err error) + Patch(ctx context.Context, name string, pt types.PatchType, data []byte, opts v1.PatchOptions, subresources ...string) (result *v1beta1.BigQueryDataTransferConfig, err error) BigQueryDataTransferConfigExpansion } @@ -60,7 +60,7 @@ type bigQueryDataTransferConfigs struct { } // newBigQueryDataTransferConfigs returns a BigQueryDataTransferConfigs -func newBigQueryDataTransferConfigs(c *BigquerydatatransferV1alpha1Client, namespace string) *bigQueryDataTransferConfigs { +func newBigQueryDataTransferConfigs(c *BigquerydatatransferV1beta1Client, namespace string) *bigQueryDataTransferConfigs { return &bigQueryDataTransferConfigs{ client: c.RESTClient(), ns: namespace, @@ -68,8 +68,8 @@ func newBigQueryDataTransferConfigs(c *BigquerydatatransferV1alpha1Client, names } // Get takes name of the bigQueryDataTransferConfig, and returns the corresponding bigQueryDataTransferConfig object, and an error if there is any. 
-func (c *bigQueryDataTransferConfigs) Get(ctx context.Context, name string, options v1.GetOptions) (result *v1alpha1.BigQueryDataTransferConfig, err error) { - result = &v1alpha1.BigQueryDataTransferConfig{} +func (c *bigQueryDataTransferConfigs) Get(ctx context.Context, name string, options v1.GetOptions) (result *v1beta1.BigQueryDataTransferConfig, err error) { + result = &v1beta1.BigQueryDataTransferConfig{} err = c.client.Get(). Namespace(c.ns). Resource("bigquerydatatransferconfigs"). @@ -81,12 +81,12 @@ func (c *bigQueryDataTransferConfigs) Get(ctx context.Context, name string, opti } // List takes label and field selectors, and returns the list of BigQueryDataTransferConfigs that match those selectors. -func (c *bigQueryDataTransferConfigs) List(ctx context.Context, opts v1.ListOptions) (result *v1alpha1.BigQueryDataTransferConfigList, err error) { +func (c *bigQueryDataTransferConfigs) List(ctx context.Context, opts v1.ListOptions) (result *v1beta1.BigQueryDataTransferConfigList, err error) { var timeout time.Duration if opts.TimeoutSeconds != nil { timeout = time.Duration(*opts.TimeoutSeconds) * time.Second } - result = &v1alpha1.BigQueryDataTransferConfigList{} + result = &v1beta1.BigQueryDataTransferConfigList{} err = c.client.Get(). Namespace(c.ns). Resource("bigquerydatatransferconfigs"). @@ -113,8 +113,8 @@ func (c *bigQueryDataTransferConfigs) Watch(ctx context.Context, opts v1.ListOpt } // Create takes the representation of a bigQueryDataTransferConfig and creates it. Returns the server's representation of the bigQueryDataTransferConfig, and an error, if there is any. -func (c *bigQueryDataTransferConfigs) Create(ctx context.Context, bigQueryDataTransferConfig *v1alpha1.BigQueryDataTransferConfig, opts v1.CreateOptions) (result *v1alpha1.BigQueryDataTransferConfig, err error) { - result = &v1alpha1.BigQueryDataTransferConfig{} +func (c *bigQueryDataTransferConfigs) Create(ctx context.Context, bigQueryDataTransferConfig *v1beta1.BigQueryDataTransferConfig, opts v1.CreateOptions) (result *v1beta1.BigQueryDataTransferConfig, err error) { + result = &v1beta1.BigQueryDataTransferConfig{} err = c.client.Post(). Namespace(c.ns). Resource("bigquerydatatransferconfigs"). @@ -126,8 +126,8 @@ func (c *bigQueryDataTransferConfigs) Create(ctx context.Context, bigQueryDataTr } // Update takes the representation of a bigQueryDataTransferConfig and updates it. Returns the server's representation of the bigQueryDataTransferConfig, and an error, if there is any. -func (c *bigQueryDataTransferConfigs) Update(ctx context.Context, bigQueryDataTransferConfig *v1alpha1.BigQueryDataTransferConfig, opts v1.UpdateOptions) (result *v1alpha1.BigQueryDataTransferConfig, err error) { - result = &v1alpha1.BigQueryDataTransferConfig{} +func (c *bigQueryDataTransferConfigs) Update(ctx context.Context, bigQueryDataTransferConfig *v1beta1.BigQueryDataTransferConfig, opts v1.UpdateOptions) (result *v1beta1.BigQueryDataTransferConfig, err error) { + result = &v1beta1.BigQueryDataTransferConfig{} err = c.client.Put(). Namespace(c.ns). Resource("bigquerydatatransferconfigs"). @@ -141,8 +141,8 @@ func (c *bigQueryDataTransferConfigs) Update(ctx context.Context, bigQueryDataTr // UpdateStatus was generated because the type contains a Status member. // Add a +genclient:noStatus comment above the type to avoid generating UpdateStatus(). 
-func (c *bigQueryDataTransferConfigs) UpdateStatus(ctx context.Context, bigQueryDataTransferConfig *v1alpha1.BigQueryDataTransferConfig, opts v1.UpdateOptions) (result *v1alpha1.BigQueryDataTransferConfig, err error) { - result = &v1alpha1.BigQueryDataTransferConfig{} +func (c *bigQueryDataTransferConfigs) UpdateStatus(ctx context.Context, bigQueryDataTransferConfig *v1beta1.BigQueryDataTransferConfig, opts v1.UpdateOptions) (result *v1beta1.BigQueryDataTransferConfig, err error) { + result = &v1beta1.BigQueryDataTransferConfig{} err = c.client.Put(). Namespace(c.ns). Resource("bigquerydatatransferconfigs"). @@ -183,8 +183,8 @@ func (c *bigQueryDataTransferConfigs) DeleteCollection(ctx context.Context, opts } // Patch applies the patch and returns the patched bigQueryDataTransferConfig. -func (c *bigQueryDataTransferConfigs) Patch(ctx context.Context, name string, pt types.PatchType, data []byte, opts v1.PatchOptions, subresources ...string) (result *v1alpha1.BigQueryDataTransferConfig, err error) { - result = &v1alpha1.BigQueryDataTransferConfig{} +func (c *bigQueryDataTransferConfigs) Patch(ctx context.Context, name string, pt types.PatchType, data []byte, opts v1.PatchOptions, subresources ...string) (result *v1beta1.BigQueryDataTransferConfig, err error) { + result = &v1beta1.BigQueryDataTransferConfig{} err = c.client.Patch(pt). Namespace(c.ns). Resource("bigquerydatatransferconfigs"). diff --git a/pkg/clients/generated/client/clientset/versioned/typed/bigquerydatatransfer/v1alpha1/doc.go b/pkg/clients/generated/client/clientset/versioned/typed/bigquerydatatransfer/v1beta1/doc.go similarity index 98% rename from pkg/clients/generated/client/clientset/versioned/typed/bigquerydatatransfer/v1alpha1/doc.go rename to pkg/clients/generated/client/clientset/versioned/typed/bigquerydatatransfer/v1beta1/doc.go index d3dac805d0..41dbecdb4a 100644 --- a/pkg/clients/generated/client/clientset/versioned/typed/bigquerydatatransfer/v1alpha1/doc.go +++ b/pkg/clients/generated/client/clientset/versioned/typed/bigquerydatatransfer/v1beta1/doc.go @@ -20,4 +20,4 @@ // Code generated by client-gen. DO NOT EDIT. // This package has the automatically generated typed clients. 
-package v1alpha1 +package v1beta1 diff --git a/pkg/clients/generated/client/clientset/versioned/typed/bigquerydatatransfer/v1alpha1/fake/doc.go b/pkg/clients/generated/client/clientset/versioned/typed/bigquerydatatransfer/v1beta1/fake/doc.go similarity index 100% rename from pkg/clients/generated/client/clientset/versioned/typed/bigquerydatatransfer/v1alpha1/fake/doc.go rename to pkg/clients/generated/client/clientset/versioned/typed/bigquerydatatransfer/v1beta1/fake/doc.go diff --git a/pkg/clients/generated/client/clientset/versioned/typed/bigquerydatatransfer/v1alpha1/fake/fake_bigquerydatatransfer_client.go b/pkg/clients/generated/client/clientset/versioned/typed/bigquerydatatransfer/v1beta1/fake/fake_bigquerydatatransfer_client.go similarity index 74% rename from pkg/clients/generated/client/clientset/versioned/typed/bigquerydatatransfer/v1alpha1/fake/fake_bigquerydatatransfer_client.go rename to pkg/clients/generated/client/clientset/versioned/typed/bigquerydatatransfer/v1beta1/fake/fake_bigquerydatatransfer_client.go index de46d68d29..133ccf50f2 100644 --- a/pkg/clients/generated/client/clientset/versioned/typed/bigquerydatatransfer/v1alpha1/fake/fake_bigquerydatatransfer_client.go +++ b/pkg/clients/generated/client/clientset/versioned/typed/bigquerydatatransfer/v1beta1/fake/fake_bigquerydatatransfer_client.go @@ -22,22 +22,22 @@ package fake import ( - v1alpha1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/client/clientset/versioned/typed/bigquerydatatransfer/v1alpha1" + v1beta1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/client/clientset/versioned/typed/bigquerydatatransfer/v1beta1" rest "k8s.io/client-go/rest" testing "k8s.io/client-go/testing" ) -type FakeBigquerydatatransferV1alpha1 struct { +type FakeBigquerydatatransferV1beta1 struct { *testing.Fake } -func (c *FakeBigquerydatatransferV1alpha1) BigQueryDataTransferConfigs(namespace string) v1alpha1.BigQueryDataTransferConfigInterface { +func (c *FakeBigquerydatatransferV1beta1) BigQueryDataTransferConfigs(namespace string) v1beta1.BigQueryDataTransferConfigInterface { return &FakeBigQueryDataTransferConfigs{c, namespace} } // RESTClient returns a RESTClient that is used to communicate // with API server by this client implementation. 
-func (c *FakeBigquerydatatransferV1alpha1) RESTClient() rest.Interface { +func (c *FakeBigquerydatatransferV1beta1) RESTClient() rest.Interface { var ret *rest.RESTClient return ret } diff --git a/pkg/clients/generated/client/clientset/versioned/typed/bigquerydatatransfer/v1alpha1/fake/fake_bigquerydatatransferconfig.go b/pkg/clients/generated/client/clientset/versioned/typed/bigquerydatatransfer/v1beta1/fake/fake_bigquerydatatransferconfig.go similarity index 67% rename from pkg/clients/generated/client/clientset/versioned/typed/bigquerydatatransfer/v1alpha1/fake/fake_bigquerydatatransferconfig.go rename to pkg/clients/generated/client/clientset/versioned/typed/bigquerydatatransfer/v1beta1/fake/fake_bigquerydatatransferconfig.go index d229e72736..485eaf0187 100644 --- a/pkg/clients/generated/client/clientset/versioned/typed/bigquerydatatransfer/v1alpha1/fake/fake_bigquerydatatransferconfig.go +++ b/pkg/clients/generated/client/clientset/versioned/typed/bigquerydatatransfer/v1beta1/fake/fake_bigquerydatatransferconfig.go @@ -24,7 +24,7 @@ package fake import ( "context" - v1alpha1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/apis/bigquerydatatransfer/v1alpha1" + v1beta1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/apis/bigquerydatatransfer/v1beta1" v1 "k8s.io/apimachinery/pkg/apis/meta/v1" labels "k8s.io/apimachinery/pkg/labels" types "k8s.io/apimachinery/pkg/types" @@ -34,29 +34,29 @@ import ( // FakeBigQueryDataTransferConfigs implements BigQueryDataTransferConfigInterface type FakeBigQueryDataTransferConfigs struct { - Fake *FakeBigquerydatatransferV1alpha1 + Fake *FakeBigquerydatatransferV1beta1 ns string } -var bigquerydatatransferconfigsResource = v1alpha1.SchemeGroupVersion.WithResource("bigquerydatatransferconfigs") +var bigquerydatatransferconfigsResource = v1beta1.SchemeGroupVersion.WithResource("bigquerydatatransferconfigs") -var bigquerydatatransferconfigsKind = v1alpha1.SchemeGroupVersion.WithKind("BigQueryDataTransferConfig") +var bigquerydatatransferconfigsKind = v1beta1.SchemeGroupVersion.WithKind("BigQueryDataTransferConfig") // Get takes name of the bigQueryDataTransferConfig, and returns the corresponding bigQueryDataTransferConfig object, and an error if there is any. -func (c *FakeBigQueryDataTransferConfigs) Get(ctx context.Context, name string, options v1.GetOptions) (result *v1alpha1.BigQueryDataTransferConfig, err error) { +func (c *FakeBigQueryDataTransferConfigs) Get(ctx context.Context, name string, options v1.GetOptions) (result *v1beta1.BigQueryDataTransferConfig, err error) { obj, err := c.Fake. - Invokes(testing.NewGetAction(bigquerydatatransferconfigsResource, c.ns, name), &v1alpha1.BigQueryDataTransferConfig{}) + Invokes(testing.NewGetAction(bigquerydatatransferconfigsResource, c.ns, name), &v1beta1.BigQueryDataTransferConfig{}) if obj == nil { return nil, err } - return obj.(*v1alpha1.BigQueryDataTransferConfig), err + return obj.(*v1beta1.BigQueryDataTransferConfig), err } // List takes label and field selectors, and returns the list of BigQueryDataTransferConfigs that match those selectors. -func (c *FakeBigQueryDataTransferConfigs) List(ctx context.Context, opts v1.ListOptions) (result *v1alpha1.BigQueryDataTransferConfigList, err error) { +func (c *FakeBigQueryDataTransferConfigs) List(ctx context.Context, opts v1.ListOptions) (result *v1beta1.BigQueryDataTransferConfigList, err error) { obj, err := c.Fake. 
- Invokes(testing.NewListAction(bigquerydatatransferconfigsResource, bigquerydatatransferconfigsKind, c.ns, opts), &v1alpha1.BigQueryDataTransferConfigList{}) + Invokes(testing.NewListAction(bigquerydatatransferconfigsResource, bigquerydatatransferconfigsKind, c.ns, opts), &v1beta1.BigQueryDataTransferConfigList{}) if obj == nil { return nil, err @@ -66,8 +66,8 @@ func (c *FakeBigQueryDataTransferConfigs) List(ctx context.Context, opts v1.List if label == nil { label = labels.Everything() } - list := &v1alpha1.BigQueryDataTransferConfigList{ListMeta: obj.(*v1alpha1.BigQueryDataTransferConfigList).ListMeta} - for _, item := range obj.(*v1alpha1.BigQueryDataTransferConfigList).Items { + list := &v1beta1.BigQueryDataTransferConfigList{ListMeta: obj.(*v1beta1.BigQueryDataTransferConfigList).ListMeta} + for _, item := range obj.(*v1beta1.BigQueryDataTransferConfigList).Items { if label.Matches(labels.Set(item.Labels)) { list.Items = append(list.Items, item) } @@ -83,43 +83,43 @@ func (c *FakeBigQueryDataTransferConfigs) Watch(ctx context.Context, opts v1.Lis } // Create takes the representation of a bigQueryDataTransferConfig and creates it. Returns the server's representation of the bigQueryDataTransferConfig, and an error, if there is any. -func (c *FakeBigQueryDataTransferConfigs) Create(ctx context.Context, bigQueryDataTransferConfig *v1alpha1.BigQueryDataTransferConfig, opts v1.CreateOptions) (result *v1alpha1.BigQueryDataTransferConfig, err error) { +func (c *FakeBigQueryDataTransferConfigs) Create(ctx context.Context, bigQueryDataTransferConfig *v1beta1.BigQueryDataTransferConfig, opts v1.CreateOptions) (result *v1beta1.BigQueryDataTransferConfig, err error) { obj, err := c.Fake. - Invokes(testing.NewCreateAction(bigquerydatatransferconfigsResource, c.ns, bigQueryDataTransferConfig), &v1alpha1.BigQueryDataTransferConfig{}) + Invokes(testing.NewCreateAction(bigquerydatatransferconfigsResource, c.ns, bigQueryDataTransferConfig), &v1beta1.BigQueryDataTransferConfig{}) if obj == nil { return nil, err } - return obj.(*v1alpha1.BigQueryDataTransferConfig), err + return obj.(*v1beta1.BigQueryDataTransferConfig), err } // Update takes the representation of a bigQueryDataTransferConfig and updates it. Returns the server's representation of the bigQueryDataTransferConfig, and an error, if there is any. -func (c *FakeBigQueryDataTransferConfigs) Update(ctx context.Context, bigQueryDataTransferConfig *v1alpha1.BigQueryDataTransferConfig, opts v1.UpdateOptions) (result *v1alpha1.BigQueryDataTransferConfig, err error) { +func (c *FakeBigQueryDataTransferConfigs) Update(ctx context.Context, bigQueryDataTransferConfig *v1beta1.BigQueryDataTransferConfig, opts v1.UpdateOptions) (result *v1beta1.BigQueryDataTransferConfig, err error) { obj, err := c.Fake. - Invokes(testing.NewUpdateAction(bigquerydatatransferconfigsResource, c.ns, bigQueryDataTransferConfig), &v1alpha1.BigQueryDataTransferConfig{}) + Invokes(testing.NewUpdateAction(bigquerydatatransferconfigsResource, c.ns, bigQueryDataTransferConfig), &v1beta1.BigQueryDataTransferConfig{}) if obj == nil { return nil, err } - return obj.(*v1alpha1.BigQueryDataTransferConfig), err + return obj.(*v1beta1.BigQueryDataTransferConfig), err } // UpdateStatus was generated because the type contains a Status member. // Add a +genclient:noStatus comment above the type to avoid generating UpdateStatus(). 
-func (c *FakeBigQueryDataTransferConfigs) UpdateStatus(ctx context.Context, bigQueryDataTransferConfig *v1alpha1.BigQueryDataTransferConfig, opts v1.UpdateOptions) (*v1alpha1.BigQueryDataTransferConfig, error) { +func (c *FakeBigQueryDataTransferConfigs) UpdateStatus(ctx context.Context, bigQueryDataTransferConfig *v1beta1.BigQueryDataTransferConfig, opts v1.UpdateOptions) (*v1beta1.BigQueryDataTransferConfig, error) { obj, err := c.Fake. - Invokes(testing.NewUpdateSubresourceAction(bigquerydatatransferconfigsResource, "status", c.ns, bigQueryDataTransferConfig), &v1alpha1.BigQueryDataTransferConfig{}) + Invokes(testing.NewUpdateSubresourceAction(bigquerydatatransferconfigsResource, "status", c.ns, bigQueryDataTransferConfig), &v1beta1.BigQueryDataTransferConfig{}) if obj == nil { return nil, err } - return obj.(*v1alpha1.BigQueryDataTransferConfig), err + return obj.(*v1beta1.BigQueryDataTransferConfig), err } // Delete takes name of the bigQueryDataTransferConfig and deletes it. Returns an error if one occurs. func (c *FakeBigQueryDataTransferConfigs) Delete(ctx context.Context, name string, opts v1.DeleteOptions) error { _, err := c.Fake. - Invokes(testing.NewDeleteActionWithOptions(bigquerydatatransferconfigsResource, c.ns, name, opts), &v1alpha1.BigQueryDataTransferConfig{}) + Invokes(testing.NewDeleteActionWithOptions(bigquerydatatransferconfigsResource, c.ns, name, opts), &v1beta1.BigQueryDataTransferConfig{}) return err } @@ -128,17 +128,17 @@ func (c *FakeBigQueryDataTransferConfigs) Delete(ctx context.Context, name strin func (c *FakeBigQueryDataTransferConfigs) DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error { action := testing.NewDeleteCollectionAction(bigquerydatatransferconfigsResource, c.ns, listOpts) - _, err := c.Fake.Invokes(action, &v1alpha1.BigQueryDataTransferConfigList{}) + _, err := c.Fake.Invokes(action, &v1beta1.BigQueryDataTransferConfigList{}) return err } // Patch applies the patch and returns the patched bigQueryDataTransferConfig. -func (c *FakeBigQueryDataTransferConfigs) Patch(ctx context.Context, name string, pt types.PatchType, data []byte, opts v1.PatchOptions, subresources ...string) (result *v1alpha1.BigQueryDataTransferConfig, err error) { +func (c *FakeBigQueryDataTransferConfigs) Patch(ctx context.Context, name string, pt types.PatchType, data []byte, opts v1.PatchOptions, subresources ...string) (result *v1beta1.BigQueryDataTransferConfig, err error) { obj, err := c.Fake. 
- Invokes(testing.NewPatchSubresourceAction(bigquerydatatransferconfigsResource, c.ns, name, pt, data, subresources...), &v1alpha1.BigQueryDataTransferConfig{}) + Invokes(testing.NewPatchSubresourceAction(bigquerydatatransferconfigsResource, c.ns, name, pt, data, subresources...), &v1beta1.BigQueryDataTransferConfig{}) if obj == nil { return nil, err } - return obj.(*v1alpha1.BigQueryDataTransferConfig), err + return obj.(*v1beta1.BigQueryDataTransferConfig), err } diff --git a/pkg/clients/generated/client/clientset/versioned/typed/bigquerydatatransfer/v1alpha1/generated_expansion.go b/pkg/clients/generated/client/clientset/versioned/typed/bigquerydatatransfer/v1beta1/generated_expansion.go similarity index 98% rename from pkg/clients/generated/client/clientset/versioned/typed/bigquerydatatransfer/v1alpha1/generated_expansion.go rename to pkg/clients/generated/client/clientset/versioned/typed/bigquerydatatransfer/v1beta1/generated_expansion.go index 5ca878b12a..819c7724e3 100644 --- a/pkg/clients/generated/client/clientset/versioned/typed/bigquerydatatransfer/v1alpha1/generated_expansion.go +++ b/pkg/clients/generated/client/clientset/versioned/typed/bigquerydatatransfer/v1beta1/generated_expansion.go @@ -19,6 +19,6 @@ // Code generated by client-gen. DO NOT EDIT. -package v1alpha1 +package v1beta1 type BigQueryDataTransferConfigExpansion interface{} diff --git a/pkg/controller/direct/bigquerydatatransfer/bigquerydatatransfer_mappings.go b/pkg/controller/direct/bigquerydatatransfer/bigquerydatatransfer_mappings.go index d436ce7a24..4480feda30 100644 --- a/pkg/controller/direct/bigquerydatatransfer/bigquerydatatransfer_mappings.go +++ b/pkg/controller/direct/bigquerydatatransfer/bigquerydatatransfer_mappings.go @@ -16,7 +16,7 @@ package bigquerydatatransfer import ( pb "cloud.google.com/go/bigquery/datatransfer/apiv1/datatransferpb" - krm "github.com/GoogleCloudPlatform/k8s-config-connector/apis/bigquerydatatransfer/v1alpha1" + krm "github.com/GoogleCloudPlatform/k8s-config-connector/apis/bigquerydatatransfer/v1beta1" refv1beta1 "github.com/GoogleCloudPlatform/k8s-config-connector/apis/refs/v1beta1" "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/controller/direct" "google.golang.org/protobuf/types/known/structpb" diff --git a/pkg/controller/direct/bigquerydatatransfer/bigquerydatatransferconfig_controller.go b/pkg/controller/direct/bigquerydatatransfer/bigquerydatatransferconfig_controller.go index ee2fa43883..83aa240308 100644 --- a/pkg/controller/direct/bigquerydatatransfer/bigquerydatatransferconfig_controller.go +++ b/pkg/controller/direct/bigquerydatatransfer/bigquerydatatransferconfig_controller.go @@ -19,7 +19,7 @@ import ( "fmt" "reflect" - krm "github.com/GoogleCloudPlatform/k8s-config-connector/apis/bigquerydatatransfer/v1alpha1" + krm "github.com/GoogleCloudPlatform/k8s-config-connector/apis/bigquerydatatransfer/v1beta1" refv1beta1 "github.com/GoogleCloudPlatform/k8s-config-connector/apis/refs/v1beta1" "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/config" "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/controller/direct" @@ -38,7 +38,6 @@ import ( ) const ( - ctrlName = "bigquerydatatransfer-controller" serviceDomain = "//bigquerydatatransfer.googleapis.com" ) @@ -186,7 +185,7 @@ type Adapter struct { var _ directbase.Adapter = &Adapter{} func (a *Adapter) Find(ctx context.Context) (bool, error) { - log := klog.FromContext(ctx).WithName(ctrlName) + log := klog.FromContext(ctx) if a.id.transferConfigID == "" { // resource ID is not yet generated by 
the GCP service return false, nil @@ -209,7 +208,7 @@ func (a *Adapter) Find(ctx context.Context) (bool, error) { func (a *Adapter) Create(ctx context.Context, createOp *directbase.CreateOperation) error { u := createOp.GetUnstructured() - log := klog.FromContext(ctx).WithName(ctrlName) + log := klog.FromContext(ctx) log.V(2).Info("creating BigQueryDataTransferConfig", "name", a.id.FullyQualifiedName()) mapCtx := &direct.MapContext{} @@ -256,7 +255,7 @@ func (a *Adapter) Create(ctx context.Context, createOp *directbase.CreateOperati func (a *Adapter) Update(ctx context.Context, updateOp *directbase.UpdateOperation) error { u := updateOp.GetUnstructured() - log := klog.FromContext(ctx).WithName(ctrlName) + log := klog.FromContext(ctx) log.V(2).Info("updating BigQueryDataTransferConfig", "name", a.id.FullyQualifiedName()) mapCtx := &direct.MapContext{} @@ -368,7 +367,7 @@ func (a *Adapter) Export(ctx context.Context) (*unstructured.Unstructured, error // Delete implements the Adapter interface. func (a *Adapter) Delete(ctx context.Context, deleteOp *directbase.DeleteOperation) (bool, error) { - log := klog.FromContext(ctx).WithName(ctrlName) + log := klog.FromContext(ctx) log.V(2).Info("deleting BigQueryDataTransferConfig", "name", a.id.FullyQualifiedName()) if a.id.transferConfigID == "" { diff --git a/pkg/controller/direct/bigquerydatatransfer/bigquerydatatransferconfig_externalresource.go b/pkg/controller/direct/bigquerydatatransfer/bigquerydatatransferconfig_externalresource.go index 1df1fadf2d..7040c7d526 100644 --- a/pkg/controller/direct/bigquerydatatransfer/bigquerydatatransferconfig_externalresource.go +++ b/pkg/controller/direct/bigquerydatatransfer/bigquerydatatransferconfig_externalresource.go @@ -18,7 +18,7 @@ import ( "fmt" "strings" - krm "github.com/GoogleCloudPlatform/k8s-config-connector/apis/bigquerydatatransfer/v1alpha1" + krm "github.com/GoogleCloudPlatform/k8s-config-connector/apis/bigquerydatatransfer/v1beta1" "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/controller/direct" ) diff --git a/pkg/controller/direct/bigquerydatatransfer/mapper.generated.go b/pkg/controller/direct/bigquerydatatransfer/mapper.generated.go index e4c6d2608a..e7af0b7901 100644 --- a/pkg/controller/direct/bigquerydatatransfer/mapper.generated.go +++ b/pkg/controller/direct/bigquerydatatransfer/mapper.generated.go @@ -16,7 +16,7 @@ package bigquerydatatransfer import ( pb "cloud.google.com/go/bigquery/datatransfer/apiv1/datatransferpb" - krm "github.com/GoogleCloudPlatform/k8s-config-connector/apis/bigquerydatatransfer/v1alpha1" + krm "github.com/GoogleCloudPlatform/k8s-config-connector/apis/bigquerydatatransfer/v1beta1" "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/controller/direct" ) diff --git a/pkg/gvks/supportedgvks/gvks_generated.go b/pkg/gvks/supportedgvks/gvks_generated.go index 785ffa0a6d..9617028451 100644 --- a/pkg/gvks/supportedgvks/gvks_generated.go +++ b/pkg/gvks/supportedgvks/gvks_generated.go @@ -544,6 +544,16 @@ var SupportedGVKs = map[schema.GroupVersionKind]GVKMetadata{ "cnrm.cloud.google.com/system": "true", }, }, + { + Group: "bigquerydatatransfer.cnrm.cloud.google.com", + Version: "v1beta1", + Kind: "BigQueryDataTransferConfig", + }: { + Labels: map[string]string{ + "cnrm.cloud.google.com/managed-by-kcc": "true", + "cnrm.cloud.google.com/system": "true", + }, + }, { Group: "bigquery.cnrm.cloud.google.com", Version: "v1beta1", diff --git a/pkg/snippet/snippetgeneration/snippetgeneration.go b/pkg/snippet/snippetgeneration/snippetgeneration.go index 
6ac21b2582..1e333ae85a 100644 --- a/pkg/snippet/snippetgeneration/snippetgeneration.go +++ b/pkg/snippet/snippetgeneration/snippetgeneration.go @@ -38,6 +38,7 @@ var preferredSampleForResource = map[string]string{ "bigqueryjob": "query-bigquery-job", "bigtableappprofile": "multicluster-bigtable-app-profile", "bigtableinstance": "replicated-instance", + "bigquerydatatransferconfig": "bigquerydatatransferconfig-salesforce", "billingbudgetsbudget": "calendar-budget", "binaryauthorizationpolicy": "cluster-policy", "certificatemanagercertificate": "self-managed-certificate", diff --git a/pkg/test/resourcefixture/sets.go b/pkg/test/resourcefixture/sets.go index e2f7ef763c..fb673a579a 100644 --- a/pkg/test/resourcefixture/sets.go +++ b/pkg/test/resourcefixture/sets.go @@ -92,6 +92,7 @@ func IsPureDirectResource(gk schema.GroupKind) bool { "RedisCluster", "NetworkConnectivityServiceConnectionPolicy", "DataformRepository", + "BigQueryDataTransferConfig", } return slices.Contains(pureDirectResources, gk.Kind) } diff --git a/pkg/test/resourcefixture/testdata/basic/bigquerydatatransfer/v1alpha1/bigquerydatatransferconfig/bigquerydatatransferconfig-salesforce/_generated_object_bigquerydatatransferconfig-salesforce.golden.yaml b/pkg/test/resourcefixture/testdata/basic/bigquerydatatransfer/v1beta1/bigquerydatatransferconfig/bigquerydatatransferconfig-salesforce/_generated_object_bigquerydatatransferconfig-salesforce.golden.yaml similarity index 95% rename from pkg/test/resourcefixture/testdata/basic/bigquerydatatransfer/v1alpha1/bigquerydatatransferconfig/bigquerydatatransferconfig-salesforce/_generated_object_bigquerydatatransferconfig-salesforce.golden.yaml rename to pkg/test/resourcefixture/testdata/basic/bigquerydatatransfer/v1beta1/bigquerydatatransferconfig/bigquerydatatransferconfig-salesforce/_generated_object_bigquerydatatransferconfig-salesforce.golden.yaml index 538b93c3ce..e7e424f402 100644 --- a/pkg/test/resourcefixture/testdata/basic/bigquerydatatransfer/v1alpha1/bigquerydatatransferconfig/bigquerydatatransferconfig-salesforce/_generated_object_bigquerydatatransferconfig-salesforce.golden.yaml +++ b/pkg/test/resourcefixture/testdata/basic/bigquerydatatransfer/v1beta1/bigquerydatatransferconfig/bigquerydatatransferconfig-salesforce/_generated_object_bigquerydatatransferconfig-salesforce.golden.yaml @@ -1,4 +1,4 @@ -apiVersion: bigquerydatatransfer.cnrm.cloud.google.com/v1alpha1 +apiVersion: bigquerydatatransfer.cnrm.cloud.google.com/v1beta1 kind: BigQueryDataTransferConfig metadata: annotations: diff --git a/pkg/test/resourcefixture/testdata/basic/bigquerydatatransfer/v1alpha1/bigquerydatatransferconfig/bigquerydatatransferconfig-salesforce/_http.log b/pkg/test/resourcefixture/testdata/basic/bigquerydatatransfer/v1beta1/bigquerydatatransferconfig/bigquerydatatransferconfig-salesforce/_http.log similarity index 100% rename from pkg/test/resourcefixture/testdata/basic/bigquerydatatransfer/v1alpha1/bigquerydatatransferconfig/bigquerydatatransferconfig-salesforce/_http.log rename to pkg/test/resourcefixture/testdata/basic/bigquerydatatransfer/v1beta1/bigquerydatatransferconfig/bigquerydatatransferconfig-salesforce/_http.log diff --git a/pkg/test/resourcefixture/testdata/basic/bigquerydatatransfer/v1alpha1/bigquerydatatransferconfig/bigquerydatatransferconfig-salesforce/create.yaml b/pkg/test/resourcefixture/testdata/basic/bigquerydatatransfer/v1beta1/bigquerydatatransferconfig/bigquerydatatransferconfig-salesforce/create.yaml similarity index 94% rename from 
pkg/test/resourcefixture/testdata/basic/bigquerydatatransfer/v1alpha1/bigquerydatatransferconfig/bigquerydatatransferconfig-salesforce/create.yaml rename to pkg/test/resourcefixture/testdata/basic/bigquerydatatransfer/v1beta1/bigquerydatatransferconfig/bigquerydatatransferconfig-salesforce/create.yaml index 4b1ab90a44..de5cbad813 100644 --- a/pkg/test/resourcefixture/testdata/basic/bigquerydatatransfer/v1alpha1/bigquerydatatransferconfig/bigquerydatatransferconfig-salesforce/create.yaml +++ b/pkg/test/resourcefixture/testdata/basic/bigquerydatatransfer/v1beta1/bigquerydatatransferconfig/bigquerydatatransferconfig-salesforce/create.yaml @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -apiVersion: bigquerydatatransfer.cnrm.cloud.google.com/v1alpha1 +apiVersion: bigquerydatatransfer.cnrm.cloud.google.com/v1beta1 kind: BigQueryDataTransferConfig metadata: name: bigquerydatatransferconfig-${uniqueId} diff --git a/pkg/test/resourcefixture/testdata/basic/bigquerydatatransfer/v1alpha1/bigquerydatatransferconfig/bigquerydatatransferconfig-salesforce/dependencies.yaml b/pkg/test/resourcefixture/testdata/basic/bigquerydatatransfer/v1beta1/bigquerydatatransferconfig/bigquerydatatransferconfig-salesforce/dependencies.yaml similarity index 100% rename from pkg/test/resourcefixture/testdata/basic/bigquerydatatransfer/v1alpha1/bigquerydatatransferconfig/bigquerydatatransferconfig-salesforce/dependencies.yaml rename to pkg/test/resourcefixture/testdata/basic/bigquerydatatransfer/v1beta1/bigquerydatatransferconfig/bigquerydatatransferconfig-salesforce/dependencies.yaml diff --git a/pkg/test/resourcefixture/testdata/basic/bigquerydatatransfer/v1alpha1/bigquerydatatransferconfig/bigquerydatatransferconfig-salesforce/update.yaml b/pkg/test/resourcefixture/testdata/basic/bigquerydatatransfer/v1beta1/bigquerydatatransferconfig/bigquerydatatransferconfig-salesforce/update.yaml similarity index 94% rename from pkg/test/resourcefixture/testdata/basic/bigquerydatatransfer/v1alpha1/bigquerydatatransferconfig/bigquerydatatransferconfig-salesforce/update.yaml rename to pkg/test/resourcefixture/testdata/basic/bigquerydatatransfer/v1beta1/bigquerydatatransferconfig/bigquerydatatransferconfig-salesforce/update.yaml index f185bd5703..b5010fc6e1 100644 --- a/pkg/test/resourcefixture/testdata/basic/bigquerydatatransfer/v1alpha1/bigquerydatatransferconfig/bigquerydatatransferconfig-salesforce/update.yaml +++ b/pkg/test/resourcefixture/testdata/basic/bigquerydatatransfer/v1beta1/bigquerydatatransferconfig/bigquerydatatransferconfig-salesforce/update.yaml @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-apiVersion: bigquerydatatransfer.cnrm.cloud.google.com/v1alpha1 +apiVersion: bigquerydatatransfer.cnrm.cloud.google.com/v1beta1 kind: BigQueryDataTransferConfig metadata: name: bigquerydatatransferconfig-${uniqueId} diff --git a/pkg/test/resourcefixture/testdata/basic/bigquerydatatransfer/v1alpha1/bigquerydatatransferconfig/bigquerydatatransferconfig-scheduledquery/_generated_object_bigquerydatatransferconfig-scheduledquery.golden.yaml b/pkg/test/resourcefixture/testdata/basic/bigquerydatatransfer/v1beta1/bigquerydatatransferconfig/bigquerydatatransferconfig-scheduledquery/_generated_object_bigquerydatatransferconfig-scheduledquery.golden.yaml similarity index 95% rename from pkg/test/resourcefixture/testdata/basic/bigquerydatatransfer/v1alpha1/bigquerydatatransferconfig/bigquerydatatransferconfig-scheduledquery/_generated_object_bigquerydatatransferconfig-scheduledquery.golden.yaml rename to pkg/test/resourcefixture/testdata/basic/bigquerydatatransfer/v1beta1/bigquerydatatransferconfig/bigquerydatatransferconfig-scheduledquery/_generated_object_bigquerydatatransferconfig-scheduledquery.golden.yaml index 6ef1f2141b..7a7caac5cb 100644 --- a/pkg/test/resourcefixture/testdata/basic/bigquerydatatransfer/v1alpha1/bigquerydatatransferconfig/bigquerydatatransferconfig-scheduledquery/_generated_object_bigquerydatatransferconfig-scheduledquery.golden.yaml +++ b/pkg/test/resourcefixture/testdata/basic/bigquerydatatransfer/v1beta1/bigquerydatatransferconfig/bigquerydatatransferconfig-scheduledquery/_generated_object_bigquerydatatransferconfig-scheduledquery.golden.yaml @@ -1,4 +1,4 @@ -apiVersion: bigquerydatatransfer.cnrm.cloud.google.com/v1alpha1 +apiVersion: bigquerydatatransfer.cnrm.cloud.google.com/v1beta1 kind: BigQueryDataTransferConfig metadata: annotations: diff --git a/pkg/test/resourcefixture/testdata/basic/bigquerydatatransfer/v1alpha1/bigquerydatatransferconfig/bigquerydatatransferconfig-scheduledquery/_http.log b/pkg/test/resourcefixture/testdata/basic/bigquerydatatransfer/v1beta1/bigquerydatatransferconfig/bigquerydatatransferconfig-scheduledquery/_http.log similarity index 100% rename from pkg/test/resourcefixture/testdata/basic/bigquerydatatransfer/v1alpha1/bigquerydatatransferconfig/bigquerydatatransferconfig-scheduledquery/_http.log rename to pkg/test/resourcefixture/testdata/basic/bigquerydatatransfer/v1beta1/bigquerydatatransferconfig/bigquerydatatransferconfig-scheduledquery/_http.log diff --git a/pkg/test/resourcefixture/testdata/basic/bigquerydatatransfer/v1alpha1/bigquerydatatransferconfig/bigquerydatatransferconfig-scheduledquery/create.yaml b/pkg/test/resourcefixture/testdata/basic/bigquerydatatransfer/v1beta1/bigquerydatatransferconfig/bigquerydatatransferconfig-scheduledquery/create.yaml similarity index 94% rename from pkg/test/resourcefixture/testdata/basic/bigquerydatatransfer/v1alpha1/bigquerydatatransferconfig/bigquerydatatransferconfig-scheduledquery/create.yaml rename to pkg/test/resourcefixture/testdata/basic/bigquerydatatransfer/v1beta1/bigquerydatatransferconfig/bigquerydatatransferconfig-scheduledquery/create.yaml index 56b21dcf00..e09fccea65 100644 --- a/pkg/test/resourcefixture/testdata/basic/bigquerydatatransfer/v1alpha1/bigquerydatatransferconfig/bigquerydatatransferconfig-scheduledquery/create.yaml +++ b/pkg/test/resourcefixture/testdata/basic/bigquerydatatransfer/v1beta1/bigquerydatatransferconfig/bigquerydatatransferconfig-scheduledquery/create.yaml @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations 
under the License. -apiVersion: bigquerydatatransfer.cnrm.cloud.google.com/v1alpha1 +apiVersion: bigquerydatatransfer.cnrm.cloud.google.com/v1beta1 kind: BigQueryDataTransferConfig metadata: name: bigquerydatatransferconfig-${uniqueId} diff --git a/pkg/test/resourcefixture/testdata/basic/bigquerydatatransfer/v1alpha1/bigquerydatatransferconfig/bigquerydatatransferconfig-scheduledquery/dependencies.yaml b/pkg/test/resourcefixture/testdata/basic/bigquerydatatransfer/v1beta1/bigquerydatatransferconfig/bigquerydatatransferconfig-scheduledquery/dependencies.yaml similarity index 100% rename from pkg/test/resourcefixture/testdata/basic/bigquerydatatransfer/v1alpha1/bigquerydatatransferconfig/bigquerydatatransferconfig-scheduledquery/dependencies.yaml rename to pkg/test/resourcefixture/testdata/basic/bigquerydatatransfer/v1beta1/bigquerydatatransferconfig/bigquerydatatransferconfig-scheduledquery/dependencies.yaml diff --git a/pkg/test/resourcefixture/testdata/basic/bigquerydatatransfer/v1alpha1/bigquerydatatransferconfig/bigquerydatatransferconfig-scheduledquery/update.yaml b/pkg/test/resourcefixture/testdata/basic/bigquerydatatransfer/v1beta1/bigquerydatatransferconfig/bigquerydatatransferconfig-scheduledquery/update.yaml similarity index 94% rename from pkg/test/resourcefixture/testdata/basic/bigquerydatatransfer/v1alpha1/bigquerydatatransferconfig/bigquerydatatransferconfig-scheduledquery/update.yaml rename to pkg/test/resourcefixture/testdata/basic/bigquerydatatransfer/v1beta1/bigquerydatatransferconfig/bigquerydatatransferconfig-scheduledquery/update.yaml index 7cb911a4c0..98bd3102fc 100644 --- a/pkg/test/resourcefixture/testdata/basic/bigquerydatatransfer/v1alpha1/bigquerydatatransferconfig/bigquerydatatransferconfig-scheduledquery/update.yaml +++ b/pkg/test/resourcefixture/testdata/basic/bigquerydatatransfer/v1beta1/bigquerydatatransferconfig/bigquerydatatransferconfig-scheduledquery/update.yaml @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -apiVersion: bigquerydatatransfer.cnrm.cloud.google.com/v1alpha1 +apiVersion: bigquerydatatransfer.cnrm.cloud.google.com/v1beta1 kind: BigQueryDataTransferConfig metadata: name: bigquerydatatransferconfig-${uniqueId} diff --git a/scripts/generate-google3-docs/resource-reference/_toc.yaml b/scripts/generate-google3-docs/resource-reference/_toc.yaml index de1bd45088..28a12a2c89 100644 --- a/scripts/generate-google3-docs/resource-reference/_toc.yaml +++ b/scripts/generate-google3-docs/resource-reference/_toc.yaml @@ -51,6 +51,8 @@ toc: path: /config-connector/docs/reference/resource-docs/bigquery/bigqueryjob.md - title: "BigQueryTable" path: /config-connector/docs/reference/resource-docs/bigquery/bigquerytable.md + - title: "BigQueryDataTransferConfig" + path: /config-connector/docs/reference/resource-docs/bigquerydatatransfer/bigquerydatatransferconfig.md - title: "BillingBudgets" section: - title: "BillingBudgetsBudget" diff --git a/scripts/generate-google3-docs/resource-reference/generated/resource-docs/bigquerydatatransfer/bigquerydatatransferconfig.md b/scripts/generate-google3-docs/resource-reference/generated/resource-docs/bigquerydatatransfer/bigquerydatatransferconfig.md new file mode 100644 index 0000000000..4833cabaa3 --- /dev/null +++ b/scripts/generate-google3-docs/resource-reference/generated/resource-docs/bigquerydatatransfer/bigquerydatatransferconfig.md @@ -0,0 +1,750 @@ +{# AUTOGENERATED. DO NOT EDIT. 
#}
+
+{% extends "config-connector/_base.html" %}
+
+{% block page_title %}BigQueryDataTransferConfig{% endblock %}
+{% block body %}
+
+| Property | Value |
+| --- | --- |
+| {{gcp_name_short}} Service Name | BigQuery Data Transfer |
+| {{gcp_name_short}} Service Documentation | /bigquery/docs/ |
+| {{gcp_name_short}} REST Resource Name | v1beta1.projects.locations.transferConfigs |
+| {{gcp_name_short}} REST Resource Documentation | /bigquery/docs/reference/datatransfer/rest/v1/projects.locations.transferConfigs |
+| {{product_name_short}} Resource Short Names | bigquerydatatransferconfig |
+| {{product_name_short}} Service Name | bigquerydatatransfer.googleapis.com |
+| {{product_name_short}} Resource Fully Qualified Name | bigquerydatatransferconfigs.bigquerydatatransfer.cnrm.cloud.google.com |
+| Can Be Referenced by IAMPolicy/IAMPolicyMember | No |
+| {{product_name_short}} Default Average Reconcile Interval In Seconds | 600 |
+
+## Custom Resource Definition Properties
+
+### Spec
+#### Schema
+```yaml
+dataRefreshWindowDays: integer
+dataSourceID: string
+datasetRef:
+  external: string
+  name: string
+  namespace: string
+disabled: boolean
+displayName: string
+emailPreferences:
+  enableFailureEmail: boolean
+encryptionConfiguration:
+  kmsKeyRef:
+    external: string
+    name: string
+    namespace: string
+location: string
+params:
+  string: string
+projectRef:
+  external: string
+  kind: string
+  name: string
+  namespace: string
+pubSubTopicRef:
+  external: string
+  name: string
+  namespace: string
+resourceID: string
+schedule: string
+scheduleOptions:
+  disableAutoScheduling: boolean
+  endTime: string
+  startTime: string
+serviceAccountRef:
+  external: string
+  name: string
+  namespace: string
+```
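For orientation, here is a minimal manifest sketch against the schema above. It is editorial and not part of the generated reference page or this PR's fixtures: the metadata name, project and dataset reference names, and the `scheduled_query` parameter values are illustrative assumptions and may need adjusting for your data source.

```yaml
# Illustrative sketch only: names and parameter values are hypothetical.
apiVersion: bigquerydatatransfer.cnrm.cloud.google.com/v1beta1
kind: BigQueryDataTransferConfig
metadata:
  name: bigquerydatatransferconfig-sample
spec:
  projectRef:
    name: my-project            # assumed Project resource name
  location: us-central1         # immutable once created
  displayName: "example scheduled query"
  dataSourceID: scheduled_query
  datasetRef:
    name: my-bigquery-dataset   # assumed BigQueryDataset resource name
  schedule: "every 24 hours"
  params:
    query: "SELECT CURRENT_TIMESTAMP() AS ts"
    destination_table_name_template: "my_table"
    write_disposition: "WRITE_APPEND"
```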

#### Fields

| Field | Requirement | Type | Description |
| --- | --- | --- | --- |
| `dataRefreshWindowDays` | Optional | integer | The number of days to look back to automatically refresh the data. For example, if `data_refresh_window_days = 10`, then every day BigQuery reingests data for [today-10, today-1], rather than ingesting data for just [today-1]. Only valid if the data source supports the feature. Set the value to 0 to use the default value. |
| `dataSourceID` | Required | string | Immutable. Data source ID. This cannot be changed once data transfer is created. The full list of available data source IDs can be returned through an API call: https://cloud.google.com/bigquery-transfer/docs/reference/datatransfer/rest/v1/projects.locations.dataSources/list |
| `datasetRef` | Required | object | The BigQuery target dataset id. |
| `datasetRef.external` | Optional | string | If provided must be in the format `projects/[project_id]/datasets/[dataset_id]`. |
| `datasetRef.name` | Optional | string | The `metadata.name` field of a `BigQueryDataset` resource. |
| `datasetRef.namespace` | Optional | string | The `metadata.namespace` field of a `BigQueryDataset` resource. |
| `disabled` | Optional | boolean | Is this config disabled. When set to true, no runs will be scheduled for this transfer config. |
| `displayName` | Optional | string | User specified display name for the data transfer. |
| `emailPreferences` | Optional | object | Email notifications will be sent according to these preferences to the email address of the user who owns this transfer config. |
| `emailPreferences.enableFailureEmail` | Optional | boolean | If true, email notifications will be sent on transfer run failures. |
| `encryptionConfiguration` | Optional | object | The encryption configuration part. Currently, it is only used for the optional KMS key name. The BigQuery service account of your project must be granted permissions to use the key. Read methods will return the key name applied in effect. Write methods will apply the key if it is present, or otherwise try to apply project default keys if it is absent. |
| `encryptionConfiguration.kmsKeyRef` | Optional | object | The KMS key used for encrypting BigQuery data. |
| `encryptionConfiguration.kmsKeyRef.external` | Optional | string | A reference to an externally managed KMSCryptoKey. Should be in the format `projects/[kms_project_id]/locations/[region]/keyRings/[key_ring_id]/cryptoKeys/[key]`. |
| `encryptionConfiguration.kmsKeyRef.name` | Optional | string | The `name` of a `KMSCryptoKey` resource. |
| `encryptionConfiguration.kmsKeyRef.namespace` | Optional | string | The `namespace` of a `KMSCryptoKey` resource. |
| `location` | Required | string | Immutable. |
| `params` | Required | map (key: string, value: string) | Parameters specific to each data source. For more information see the bq tab in the 'Setting up a data transfer' section for each data source. For example the parameters for Cloud Storage transfers are listed here: https://cloud.google.com/bigquery-transfer/docs/cloud-storage-transfer#bq |
| `projectRef` | Required | object | The Project that this resource belongs to. |
| `projectRef.external` | Optional | string | The `projectID` field of a project, when not managed by Config Connector. |
| `projectRef.kind` | Optional | string | The kind of the Project resource; optional but must be `Project` if provided. |
| `projectRef.name` | Optional | string | The `name` field of a `Project` resource. |
| `projectRef.namespace` | Optional | string | The `namespace` field of a `Project` resource. |
| `pubSubTopicRef` | Optional | object | Pub/Sub topic where notifications will be sent after transfer runs associated with this transfer config finish. |
| `pubSubTopicRef.external` | Optional | string | If provided must be in the format `projects/[project_id]/topics/[topic_id]`. |
| `pubSubTopicRef.name` | Optional | string | The `metadata.name` field of a `PubSubTopic` resource. |
| `pubSubTopicRef.namespace` | Optional | string | The `metadata.namespace` field of a `PubSubTopic` resource. |
| `resourceID` | Optional | string | Immutable. The BigQueryDataTransferConfig name. If not given, the metadata.name will be used. |
| `schedule` | Optional | string | Data transfer schedule. If the data source does not support a custom schedule, this should be empty. If it is empty, the default value for the data source will be used. The specified times are in UTC. Examples of valid format: `1st,3rd monday of month 15:30`, `every wed,fri of jan,jun 13:15`, and `first sunday of quarter 00:00`. See more explanation about the format here: https://cloud.google.com/appengine/docs/flexible/python/scheduling-jobs-with-cron-yaml#the_schedule_format NOTE: The minimum interval time between recurring transfers depends on the data source; refer to the documentation for your data source. |
| `scheduleOptions` | Optional | object | Options customizing the data transfer schedule. |
| `scheduleOptions.disableAutoScheduling` | Optional | boolean | If true, automatic scheduling of data transfer runs for this configuration will be disabled. The runs can be started on ad-hoc basis using StartManualTransferRuns API. When automatic scheduling is disabled, the TransferConfig.schedule field will be ignored. |
| `scheduleOptions.endTime` | Optional | string | Defines time to stop scheduling transfer runs. A transfer run cannot be scheduled at or after the end time. The end time can be changed at any moment. The time when a data transfer can be triggered manually is not limited by this option. |
| `scheduleOptions.startTime` | Optional | string | Specifies time to start scheduling transfer runs. The first run will be scheduled at or after the start time according to a recurrence pattern defined in the schedule string. The start time can be changed at any moment. The time when a data transfer can be triggered manually is not limited by this option. |
| `serviceAccountRef` | Optional | object | Service account email. If this field is set, the transfer config will be created with this service account's credentials. It requires that the requesting user calling this API has permissions to act as this service account. Note that not all data sources support service account credentials when creating a transfer config. For the latest list of data sources, please refer to https://cloud.google.com/bigquery/docs/use-service-accounts. |
| `serviceAccountRef.external` | Optional | string | The `email` field of an `IAMServiceAccount` resource. |
| `serviceAccountRef.name` | Optional | string | Name of the referent. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names |
| `serviceAccountRef.namespace` | Optional | string | Namespace of the referent. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/namespaces/ |

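To tie the field reference above together, here is a minimal, hypothetical spec that exercises the reference-style fields (`datasetRef`, `pubSubTopicRef`, `encryptionConfiguration.kmsKeyRef`) alongside `schedule` and `scheduleOptions`. It is a sketch only: the resource names, project ID, and KMS key path are placeholders and are not taken from this change.

```yaml
# Illustrative sketch only; all names, IDs, and the KMS key path are placeholders.
apiVersion: bigquerydatatransfer.cnrm.cloud.google.com/v1beta1
kind: BigQueryDataTransferConfig
metadata:
  name: example-transfer-config
spec:
  projectRef:
    external: my-project-id            # placeholder project ID
  location: us-central1
  dataSourceID: "scheduled_query"
  displayName: "illustrative transfer config"
  datasetRef:
    name: example-bigquerydataset      # a BigQueryDataset managed by Config Connector
  params:
    destination_table_name_template: "my_table"
    write_disposition: "WRITE_APPEND"
    query: "SELECT 1"
  # One of the schedule formats documented above.
  schedule: "first sunday of quarter 00:00"
  scheduleOptions:
    disableAutoScheduling: false
    startTime: "2024-01-01T00:00:00Z"  # illustrative RFC 3339 timestamps
    endTime: "2025-01-01T00:00:00Z"
  pubSubTopicRef:
    name: example-pubsubtopic
  encryptionConfiguration:
    kmsKeyRef:
      external: projects/my-kms-project/locations/us-central1/keyRings/my-ring/cryptoKeys/my-key
```

Each reference accepts either the `external` form (using the formats documented above) or the `name`/`namespace` form pointing at another Config Connector resource.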
+
+### Status
+#### Schema
+```yaml
+conditions:
+- lastTransitionTime: string
+  message: string
+  reason: string
+  status: string
+  type: string
+externalRef: string
+observedGeneration: integer
+observedState:
+  datasetRegion: string
+  name: string
+  nextRunTime: string
+  ownerInfo:
+    email: string
+  state: string
+  updateTime: string
+  userID: integer
+```

#### Fields

| Field | Type | Description |
| --- | --- | --- |
| `conditions` | list (object) | Conditions represent the latest available observations of the object's current state. |
| `conditions[]` | object |  |
| `conditions[].lastTransitionTime` | string | Last time the condition transitioned from one status to another. |
| `conditions[].message` | string | Human-readable message indicating details about last transition. |
| `conditions[].reason` | string | Unique, one-word, CamelCase reason for the condition's last transition. |
| `conditions[].status` | string | Status is the status of the condition. Can be True, False, Unknown. |
| `conditions[].type` | string | Type is the type of the condition. |
| `externalRef` | string | A unique specifier for the BigQueryDataTransferConfig resource in GCP. |
| `observedGeneration` | integer | ObservedGeneration is the generation of the resource that was most recently observed by the Config Connector controller. If this is equal to metadata.generation, then that means that the current reported status reflects the most recent desired state of the resource. |
| `observedState` | object | ObservedState is the state of the resource as most recently observed in GCP. |
| `observedState.datasetRegion` | string | Output only. Region in which BigQuery dataset is located. |
| `observedState.name` | string | Identifier. The resource name of the transfer config. Transfer config names have the form either `projects/{project_id}/locations/{region}/transferConfigs/{config_id}` or `projects/{project_id}/transferConfigs/{config_id}`, where `config_id` is usually a UUID, even though it is not guaranteed or required. The name is ignored when creating a transfer config. |
| `observedState.nextRunTime` | string | Output only. Next time when data transfer will run. |
| `observedState.ownerInfo` | object | Output only. Information about the user whose credentials are used to transfer data. Populated only for `transferConfigs.get` requests. In case the user information is not available, this field will not be populated. |
| `observedState.ownerInfo.email` | string | E-mail address of the user. |
| `observedState.state` | string | Output only. State of the most recently updated transfer run. |
| `observedState.updateTime` | string | Output only. Data transfer modification time. Ignored by server on input. |
| `observedState.userID` | integer | Deprecated. Unique ID of the user on whose behalf transfer is done. |

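To make the status schema concrete, the sketch below shows roughly what a reconciled object's `status` block could look like. Every value is illustrative only and is not taken from a real resource or from this change.

```yaml
# Illustrative values only; none of these come from a real resource.
status:
  conditions:
  - type: Ready
    status: "True"
    reason: UpToDate                       # illustrative reason string
    message: The resource is up to date
    lastTransitionTime: "2024-01-01T00:00:00Z"
  externalRef: projects/my-project-id/locations/us-central1/transferConfigs/00000000-0000-0000-0000-000000000000
  observedGeneration: 1
  observedState:
    datasetRegion: us-central1
    name: projects/my-project-id/locations/us-central1/transferConfigs/00000000-0000-0000-0000-000000000000
    nextRunTime: "2024-01-07T00:00:00Z"
    state: SUCCEEDED
    updateTime: "2024-01-01T00:00:00Z"
```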
+
+## Sample YAML(s)
+
+### Bigquerydatatransferconfig Salesforce
+```yaml
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+apiVersion: bigquerydatatransfer.cnrm.cloud.google.com/v1beta1
+kind: BigQueryDataTransferConfig
+metadata:
+  name: bigquerydatatransferconfig-sample-salesforce
+spec:
+  projectRef:
+    # Replace ${PROJECT_ID?} with your project ID.
+    external: ${PROJECT_ID?}
+  location: us-central1
+  displayName: "example of big query data transfer config"
+  dataSourceID: "salesforce"
+  datasetRef:
+    name: bigquerydatatransferconfigdepsalesforce
+  params:
+    "connector.authentication.oauth.clientId": "client-id"
+    "connector.authentication.oauth.clientSecret": "client-secret"
+    "connector.authentication.oauth.myDomain": "MyDomainName"
+    "assets": "asset-a"
+---
+apiVersion: bigquery.cnrm.cloud.google.com/v1beta1
+kind: BigQueryDataset
+metadata:
+  name: bigquerydatatransferconfigdepsalesforce
+spec:
+  friendlyName: bigquerydatatransferconfig-dep-salesforce
+  location: us-central1
+```
+
+### Bigquerydatatransferconfig Scheduledquery
+```yaml
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+apiVersion: bigquerydatatransfer.cnrm.cloud.google.com/v1beta1
+kind: BigQueryDataTransferConfig
+metadata:
+  name: bigquerydatatransferconfig-sample-scheduledquery
+spec:
+  projectRef:
+    # Replace ${PROJECT_ID?} with your project ID.
+    external: ${PROJECT_ID?}
+  location: us-central1
+  displayName: "example of scheduled query"
+  dataSourceID: "scheduled_query"
+  datasetRef:
+    name: bigquerydatatransferconfigdepscheduledquery
+  params:
+    destination_table_name_template: "my_table"
+    write_disposition: "WRITE_APPEND"
+    query: "SELECT name FROM tabl WHERE x = 'y'"
+  schedule: "first sunday of quarter 00:00"
+  serviceAccountRef:
+    name: gsa-dep-scheduledquery
+---
+apiVersion: bigquery.cnrm.cloud.google.com/v1beta1
+kind: BigQueryDataset
+metadata:
+  name: bigquerydatatransferconfigdepscheduledquery
+spec:
+  friendlyName: bigquerydatatransferconfig-dep-scheduledquery
+  location: us-central1
+---
+apiVersion: iam.cnrm.cloud.google.com/v1beta1
+kind: IAMServiceAccount
+metadata:
+  annotations:
+    # Replace ${PROJECT_ID?} with your project ID.
+    cnrm.cloud.google.com/project-id: "${PROJECT_ID?}"
+  name: gsa-dep-scheduledquery
+```
+
+Note: If you have any trouble with instantiating the resource, refer to Troubleshoot Config Connector.
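The `params` block is entirely data-source specific. As a further, hypothetical illustration (not one of the generated samples above), a Cloud Storage transfer might look roughly like the following; the parameter names shown (for example `data_path_template` and `file_format`) should be double-checked against the Cloud Storage transfer documentation linked from the `params` field, and every name and ID here is a placeholder.

```yaml
# Hedged illustration only; verify parameter names against the Cloud Storage
# transfer documentation, and replace all placeholder names and IDs.
apiVersion: bigquerydatatransfer.cnrm.cloud.google.com/v1beta1
kind: BigQueryDataTransferConfig
metadata:
  name: example-gcs-transfer
spec:
  projectRef:
    external: my-project-id          # placeholder project ID
  location: us-central1
  displayName: "example Cloud Storage transfer"
  dataSourceID: "google_cloud_storage"
  datasetRef:
    name: example-bigquerydataset
  schedule: "every 24 hours"
  params:
    data_path_template: "gs://my-bucket/*.csv"
    destination_table_name_template: "my_table"
    file_format: "CSV"
    skip_leading_rows: "1"
```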
+
+{% endblock %}
diff --git a/scripts/generate-google3-docs/resource-reference/overview.md b/scripts/generate-google3-docs/resource-reference/overview.md
index 4659e1ade4..a675bd3c22 100644
--- a/scripts/generate-google3-docs/resource-reference/overview.md
+++ b/scripts/generate-google3-docs/resource-reference/overview.md
@@ -85,6 +85,10 @@ issues for {{product_name_short}}.
     {{bigquery_name}}
     BigQueryTable
+
+    {{bigquery_name}}
+    BigQueryDataTransferConfig
+
     {{bigtable_name}}
     BigtableAppProfile
diff --git a/scripts/generate-google3-docs/resource-reference/templates/bigquerydatatransfer_bigquerydatatransferconfig.tmpl b/scripts/generate-google3-docs/resource-reference/templates/bigquerydatatransfer_bigquerydatatransferconfig.tmpl
new file mode 100644
index 0000000000..bc258e79b0
--- /dev/null
+++ b/scripts/generate-google3-docs/resource-reference/templates/bigquerydatatransfer_bigquerydatatransferconfig.tmpl
@@ -0,0 +1,54 @@
+{{template "headercomment.tmpl" .}}
+
+{% extends "config-connector/_base.html" %}
+
+{% block page_title %}{{ .Kind}}{% endblock %}
+{% block body %}
+{{template "alphadisclaimer.tmpl" .}}
+{{template "iamsupport.tmpl" .}}

| Property | Value |
| --- | --- |
| {{"{{gcp_name_short}}"}} Service Name | BigQuery Data Transfer |
| {{"{{gcp_name_short}}"}} Service Documentation | /bigquery/docs/ |
| {{"{{gcp_name_short}}"}} REST Resource Name | v1beta1.projects.locations.transferConfigs |
| {{"{{gcp_name_short}}"}} REST Resource Documentation | /bigquery/docs/reference/datatransfer/rest/v1/projects.locations.transferConfigs |
| {{"{{product_name_short}}"}} Resource Short Names | {{ .ShortNames}} |
| {{"{{product_name_short}}"}} Service Name | bigquerydatatransfer.googleapis.com |
| {{"{{product_name_short}}"}} Resource Fully Qualified Name | {{ .FullyQualifiedName}} |
| {{"{{product_name_short}}"}} Default Average Reconcile Interval In Seconds | {{ .DefaultReconcileInterval}} |

+
+{{template "resource.tmpl" .}}
+{{template "endnote.tmpl" .}}
+{% endblock %}
diff --git a/scripts/resource-autogen/allowlist/allowlist.go b/scripts/resource-autogen/allowlist/allowlist.go
index c84c7dada3..75f0c0be98 100644
--- a/scripts/resource-autogen/allowlist/allowlist.go
+++ b/scripts/resource-autogen/allowlist/allowlist.go
@@ -58,7 +58,6 @@ var (
 		"bigquery_analytics_hub/google_bigquery_analytics_hub_data_exchange",
 		"bigquery_analytics_hub/google_bigquery_analytics_hub_listing",
 		"bigquery_connection/google_bigquery_connection",
-		"bigquery_data_transfer/google_bigquery_data_transfer_config",
 		"bigquery_datapolicy/google_bigquery_datapolicy_data_policy",
 		"bigquery_reservation/google_bigquery_capacity_commitment",
 		"bigquery_reservation/google_bigquery_reservation",