From 48cb1dd7b1274d2bcd6bc9195af2415cec8ed4f0 Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Mon, 30 Sep 2024 18:54:36 +0000 Subject: [PATCH 1/2] feat:Add scheduleOptionsV2 and Error fields for TransferConfig PiperOrigin-RevId: 680586383 Source-Link: https://github.com/googleapis/googleapis/commit/463b5a6b06e20504fb44bfedff59ba05b42bf0b2 Source-Link: https://github.com/googleapis/googleapis-gen/commit/68d602fca86cfbf7653612f50c5cf9e3105065c9 Copy-Tag: eyJwIjoiZ29vZ2xlLWNsb3VkLWJpZ3F1ZXJ5LWRhdGFfdHJhbnNmZXItdjEvLk93bEJvdC55YW1sIiwiaCI6IjY4ZDYwMmZjYTg2Y2ZiZjc2NTM2MTJmNTBjNWNmOWUzMTA1MDY1YzkifQ== --- .../.gitignore | 22 + .../.repo-metadata.json | 18 + .../.rubocop.yml | 33 + .../.toys.rb | 28 + .../.yardopts | 12 + .../AUTHENTICATION.md | 122 + .../CHANGELOG.md | 2 + .../Gemfile | 11 + .../LICENSE.md | 201 ++ .../README.md | 144 ++ .../Rakefile | 168 ++ .../gapic_metadata.json | 98 + ...le-cloud-bigquery-data_transfer-v1.gemspec | 29 + .../google-cloud-bigquery-data_transfer-v1.rb | 21 + .../google/cloud/bigquery/data_transfer/v1.rb | 47 + .../data_transfer/v1/bindings_override.rb | 104 + .../data_transfer/v1/data_transfer_service.rb | 57 + .../v1/data_transfer_service/client.rb | 2127 +++++++++++++++++ .../v1/data_transfer_service/credentials.rb | 53 + .../v1/data_transfer_service/paths.rb | 193 ++ .../v1/data_transfer_service/rest.rb | 55 + .../v1/data_transfer_service/rest/client.rb | 1995 ++++++++++++++++ .../rest/service_stub.rb | 1133 +++++++++ .../cloud/bigquery/data_transfer/v1/rest.rb | 40 + .../bigquery/data_transfer/v1/version.rb | 30 + .../datatransfer/v1/datatransfer_pb.rb | 89 + .../v1/datatransfer_services_pb.rb | 99 + .../bigquery/datatransfer/v1/transfer_pb.rb | 68 + .../proto_docs/README.md | 4 + .../proto_docs/google/api/client.rb | 420 ++++ .../proto_docs/google/api/field_behavior.rb | 85 + .../proto_docs/google/api/launch_stage.rb | 71 + .../proto_docs/google/api/resource.rb | 227 ++ .../bigquery/datatransfer/v1/datatransfer.rb | 722 ++++++ .../bigquery/datatransfer/v1/transfer.rb | 405 ++++ .../proto_docs/google/protobuf/any.rb | 145 ++ .../proto_docs/google/protobuf/duration.rb | 98 + .../proto_docs/google/protobuf/empty.rb | 34 + .../proto_docs/google/protobuf/field_mask.rb | 229 ++ .../proto_docs/google/protobuf/struct.rb | 96 + .../proto_docs/google/protobuf/timestamp.rb | 127 + .../proto_docs/google/protobuf/wrappers.rb | 121 + .../proto_docs/google/rpc/status.rb | 48 + .../snippets/Gemfile | 32 + .../check_valid_creds.rb | 47 + .../create_transfer_config.rb | 47 + .../delete_transfer_config.rb | 47 + .../delete_transfer_run.rb | 47 + .../enroll_data_sources.rb | 47 + .../data_transfer_service/get_data_source.rb | 47 + .../get_transfer_config.rb | 47 + .../data_transfer_service/get_transfer_run.rb | 47 + .../list_data_sources.rb | 51 + .../list_transfer_configs.rb | 51 + .../list_transfer_logs.rb | 51 + .../list_transfer_runs.rb | 51 + .../schedule_transfer_runs.rb | 47 + .../start_manual_transfer_runs.rb | 47 + .../unenroll_data_sources.rb | 47 + .../update_transfer_config.rb | 47 + ...google.cloud.bigquery.datatransfer.v1.json | 655 +++++ .../v1/data_transfer_service_paths_test.rb | 104 + .../v1/data_transfer_service_rest_test.rb | 980 ++++++++ .../v1/data_transfer_service_test.rb | 1075 +++++++++ .../test/helper.rb | 25 + 65 files changed, 13470 insertions(+) create mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/.gitignore create mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/.repo-metadata.json create mode 100644 
owl-bot-staging/google-cloud-bigquery-data_transfer-v1/.rubocop.yml create mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/.toys.rb create mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/.yardopts create mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/AUTHENTICATION.md create mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/CHANGELOG.md create mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/Gemfile create mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/LICENSE.md create mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/README.md create mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/Rakefile create mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/gapic_metadata.json create mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/google-cloud-bigquery-data_transfer-v1.gemspec create mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google-cloud-bigquery-data_transfer-v1.rb create mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/data_transfer/v1.rb create mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/data_transfer/v1/bindings_override.rb create mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/data_transfer/v1/data_transfer_service.rb create mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/data_transfer/v1/data_transfer_service/client.rb create mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/data_transfer/v1/data_transfer_service/credentials.rb create mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/data_transfer/v1/data_transfer_service/paths.rb create mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/data_transfer/v1/data_transfer_service/rest.rb create mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/data_transfer/v1/data_transfer_service/rest/client.rb create mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/data_transfer/v1/data_transfer_service/rest/service_stub.rb create mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/data_transfer/v1/rest.rb create mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/data_transfer/v1/version.rb create mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/datatransfer/v1/datatransfer_pb.rb create mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/datatransfer/v1/datatransfer_services_pb.rb create mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/datatransfer/v1/transfer_pb.rb create mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/README.md create mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/api/client.rb create mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/api/field_behavior.rb create mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/api/launch_stage.rb create mode 100644 
owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/api/resource.rb create mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/cloud/bigquery/datatransfer/v1/datatransfer.rb create mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/cloud/bigquery/datatransfer/v1/transfer.rb create mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/protobuf/any.rb create mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/protobuf/duration.rb create mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/protobuf/empty.rb create mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/protobuf/field_mask.rb create mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/protobuf/struct.rb create mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/protobuf/timestamp.rb create mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/protobuf/wrappers.rb create mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/rpc/status.rb create mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/Gemfile create mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/check_valid_creds.rb create mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/create_transfer_config.rb create mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/delete_transfer_config.rb create mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/delete_transfer_run.rb create mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/enroll_data_sources.rb create mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/get_data_source.rb create mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/get_transfer_config.rb create mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/get_transfer_run.rb create mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/list_data_sources.rb create mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/list_transfer_configs.rb create mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/list_transfer_logs.rb create mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/list_transfer_runs.rb create mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/schedule_transfer_runs.rb create mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/start_manual_transfer_runs.rb create mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/unenroll_data_sources.rb create mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/update_transfer_config.rb create mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/snippet_metadata_google.cloud.bigquery.datatransfer.v1.json create 
mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/test/google/cloud/bigquery/data_transfer/v1/data_transfer_service_paths_test.rb create mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/test/google/cloud/bigquery/data_transfer/v1/data_transfer_service_rest_test.rb create mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/test/google/cloud/bigquery/data_transfer/v1/data_transfer_service_test.rb create mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/test/helper.rb diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/.gitignore b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/.gitignore new file mode 100644 index 000000000000..0135b6bc6cfc --- /dev/null +++ b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/.gitignore @@ -0,0 +1,22 @@ +# Ignore bundler lockfiles +Gemfile.lock +gems.locked + +# Ignore documentation output +doc/* +.yardoc/* + +# Ignore test output +coverage/* + +# Ignore build artifacts +pkg/* + +# Ignore files commonly present in certain dev environments +.vagrant +.DS_STORE +.idea +*.iml + +# Ignore synth output +__pycache__ diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/.repo-metadata.json b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/.repo-metadata.json new file mode 100644 index 000000000000..682905e95539 --- /dev/null +++ b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/.repo-metadata.json @@ -0,0 +1,18 @@ +{ + "api_id": "bigquerydatatransfer.googleapis.com", + "api_shortname": "bigquerydatatransfer", + "client_documentation": "https://cloud.google.com/ruby/docs/reference/google-cloud-bigquery-data_transfer-v1/latest", + "distribution_name": "google-cloud-bigquery-data_transfer-v1", + "is_cloud": true, + "language": "ruby", + "name": "bigquerydatatransfer", + "name_pretty": "BigQuery Data Transfer Service V1 API", + "product_documentation": "https://cloud.google.com/bigquery/transfer", + "release_level": "unreleased", + "repo": "googleapis/google-cloud-ruby", + "requires_billing": true, + "ruby-cloud-description": "Schedules queries and transfers external data from SaaS applications to Google BigQuery on a regular basis. Note that google-cloud-bigquery-data_transfer-v1 is a version-specific client library. For most uses, we recommend installing the main client library google-cloud-bigquery-data_transfer instead. 
See the readme for more details.", + "ruby-cloud-env-prefix": "DATA_TRANSFER", + "ruby-cloud-product-url": "https://cloud.google.com/bigquery/transfer", + "library_type": "GAPIC_AUTO" +} diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/.rubocop.yml b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/.rubocop.yml new file mode 100644 index 000000000000..6f5635fddf0e --- /dev/null +++ b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/.rubocop.yml @@ -0,0 +1,33 @@ +inherit_gem: + google-style: google-style.yml + +AllCops: + Exclude: + - "google-cloud-bigquery-data_transfer-v1.gemspec" + - "lib/**/*_pb.rb" + - "proto_docs/**/*" + - "test/**/*" + - "acceptance/**/*" + - "samples/acceptance/**/*" + - "Rakefile" + +Layout/LineLength: + Enabled: false +Metrics/AbcSize: + Enabled: false +Metrics/ClassLength: + Enabled: false +Metrics/CyclomaticComplexity: + Enabled: false +Metrics/MethodLength: + Enabled: false +Metrics/ModuleLength: + Enabled: false +Metrics/PerceivedComplexity: + Enabled: false +Naming/AccessorMethodName: + Exclude: + - "snippets/**/*.rb" +Naming/FileName: + Exclude: + - "lib/google-cloud-bigquery-data_transfer-v1.rb" diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/.toys.rb b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/.toys.rb new file mode 100644 index 000000000000..23434bdd5d5b --- /dev/null +++ b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/.toys.rb @@ -0,0 +1,28 @@ +# frozen_string_literal: true + +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Auto-generated by gapic-generator-ruby. DO NOT EDIT! + +toys_version! ">= 0.15.3" + +if ENV["RUBY_COMMON_TOOLS"] + common_tools_dir = File.expand_path ENV["RUBY_COMMON_TOOLS"] + load File.join(common_tools_dir, "toys", "gapic") +else + load_git remote: "https://github.com/googleapis/ruby-common-tools.git", + path: "toys/gapic", + update: true +end diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/.yardopts b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/.yardopts new file mode 100644 index 000000000000..304c3609fa4b --- /dev/null +++ b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/.yardopts @@ -0,0 +1,12 @@ +--no-private +--title="BigQuery Data Transfer Service V1 API" +--exclude _pb\.rb$ +--markup markdown +--markup-provider redcarpet + +./lib/**/*.rb +./proto_docs/**/*.rb +- +README.md +LICENSE.md +AUTHENTICATION.md diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/AUTHENTICATION.md b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/AUTHENTICATION.md new file mode 100644 index 000000000000..4740aa501122 --- /dev/null +++ b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/AUTHENTICATION.md @@ -0,0 +1,122 @@ +# Authentication + +The recommended way to authenticate to the google-cloud-bigquery-data_transfer-v1 library is to use +[Application Default Credentials (ADC)](https://cloud.google.com/docs/authentication/application-default-credentials). 
+To review all of your authentication options, see [Credentials lookup](#credential-lookup). + +## Quickstart + +The following example shows how to set up authentication for a local development +environment with your user credentials. + +**NOTE:** This method is _not_ recommended for running in production. User credentials +should be used only during development. + +1. [Download and install the Google Cloud CLI](https://cloud.google.com/sdk). +2. Set up a local ADC file with your user credentials: + +```sh +gcloud auth application-default login +``` + +3. Write code as if already authenticated. + +For more information about setting up authentication for a local development environment, see +[Set up Application Default Credentials](https://cloud.google.com/docs/authentication/provide-credentials-adc#local-dev). + +## Credential Lookup + +The google-cloud-bigquery-data_transfer-v1 library provides several mechanisms to configure your system. +Generally, using Application Default Credentials to facilitate automatic +credentials discovery is the easiest method. But if you need to explicitly specify +credentials, there are several methods available to you. + +Credentials are accepted in the following ways, in the following order of precedence: + +1. Credentials specified in method arguments +2. Credentials specified in configuration +3. Credentials pointed to or included in environment variables +4. Credentials found in local ADC file +5. Credentials returned by the metadata server for the attached service account (GCP) + +### Configuration + +You can configure a path to a JSON credentials file, either for an individual client object or +globally, for all client objects. The JSON file can contain credentials created for +[workload identity federation](https://cloud.google.com/iam/docs/workload-identity-federation), +[workforce identity federation](https://cloud.google.com/iam/docs/workforce-identity-federation), or a +[service account key](https://cloud.google.com/docs/authentication/provide-credentials-adc#local-key). + +Note: Service account keys are a security risk if not managed correctly. You should +[choose a more secure alternative to service account keys](https://cloud.google.com/docs/authentication#auth-decision-tree) +whenever possible. + +To configure a credentials file for an individual client initialization: + +```ruby +require "google/cloud/bigquery/data_transfer/v1" + +client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new do |config| + config.credentials = "path/to/credentialfile.json" +end +``` + +To configure a credentials file globally for all clients: + +```ruby +require "google/cloud/bigquery/data_transfer/v1" + +::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.configure do |config| + config.credentials = "path/to/credentialfile.json" +end + +client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new +``` + +### Environment Variables + +You can also use an environment variable to provide a JSON credentials file. +The environment variable can contain a path to the credentials file or, for +environments such as Docker containers where writing files is not encouraged, +you can include the credentials file itself.
+ +The JSON file can contain credentials created for +[workload identity federation](https://cloud.google.com/iam/docs/workload-identity-federation), +[workforce identity federation](https://cloud.google.com/iam/docs/workforce-identity-federation), or a +[service account key](https://cloud.google.com/docs/authentication/provide-credentials-adc#local-key). + +Note: Service account keys are a security risk if not managed correctly. You should +[choose a more secure alternative to service account keys](https://cloud.google.com/docs/authentication#auth-decision-tree) +whenever possible. + +The environment variables that google-cloud-bigquery-data_transfer-v1 +checks for credentials are: + +* `GOOGLE_CLOUD_CREDENTIALS` - Path to JSON file, or JSON contents +* `GOOGLE_APPLICATION_CREDENTIALS` - Path to JSON file + +```ruby +require "google/cloud/bigquery/data_transfer/v1" + +ENV["GOOGLE_APPLICATION_CREDENTIALS"] = "path/to/credentialfile.json" + +client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new +``` + +### Local ADC file + +You can set up a local ADC file with your user credentials for authentication during +development. If credentials are not provided in code or in environment variables, +then the local ADC credentials are discovered. + +Follow the steps in [Quickstart](#quickstart) to set up a local ADC file. + +### Google Cloud Platform environments + +When running on Google Cloud Platform (GCP), including Google Compute Engine +(GCE), Google Kubernetes Engine (GKE), Google App Engine (GAE), Google Cloud +Functions (GCF) and Cloud Run, credentials are retrieved from the attached +service account automatically. Code should be written as if already authenticated. + +For more information, see +[Set up ADC for Google Cloud services](https://cloud.google.com/docs/authentication/provide-credentials-adc#attached-sa). diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/CHANGELOG.md b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/CHANGELOG.md new file mode 100644 index 000000000000..f88957a62ba2 --- /dev/null +++ b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/CHANGELOG.md @@ -0,0 +1,2 @@ +# Release History + diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/Gemfile b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/Gemfile new file mode 100644 index 000000000000..95163a6d11f8 --- /dev/null +++ b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/Gemfile @@ -0,0 +1,11 @@ +source "https://rubygems.org" + +gemspec + +gem "google-style", "~> 1.27.1" +gem "minitest", "~> 5.22" +gem "minitest-focus", "~> 1.4" +gem "minitest-rg", "~> 5.3" +gem "rake", ">= 13.0" +gem "redcarpet", "~> 3.6" +gem "yard", "~> 0.9" diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/LICENSE.md b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/LICENSE.md new file mode 100644 index 000000000000..c261857ba6ad --- /dev/null +++ b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/LICENSE.md @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. 
+ + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/README.md b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/README.md new file mode 100644 index 000000000000..737d3269426c --- /dev/null +++ b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/README.md @@ -0,0 +1,144 @@ +# Ruby Client for the BigQuery Data Transfer Service V1 API + +Schedule queries or transfer external data from SaaS applications to Google BigQuery on a regular basis. + +Schedules queries and transfers external data from SaaS applications to Google BigQuery on a regular basis. + +https://github.com/googleapis/google-cloud-ruby + +This gem is a _versioned_ client. It provides basic client classes for a +specific version of the BigQuery Data Transfer Service V1 API. Most users should consider using +the main client gem, +[google-cloud-bigquery-data_transfer](https://rubygems.org/gems/google-cloud-bigquery-data_transfer). +See the section below titled *Which client should I use?* for more information. + +## Installation + +``` +$ gem install google-cloud-bigquery-data_transfer-v1 +``` + +## Before You Begin + +In order to use this library, you first need to go through the following steps: + +1. [Select or create a Cloud Platform project.](https://console.cloud.google.com/project) +1. [Enable billing for your project.](https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project) +1. [Enable the API.](https://console.cloud.google.com/apis/library/bigquerydatatransfer.googleapis.com) +1. [Set up authentication.](AUTHENTICATION.md) + +## Quick Start + +```ruby +require "google/cloud/bigquery/data_transfer/v1" + +client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new +request = ::Google::Cloud::Bigquery::DataTransfer::V1::GetDataSourceRequest.new # (request fields as keyword arguments...) +response = client.get_data_source request +``` + +View the [Client Library Documentation](https://cloud.google.com/ruby/docs/reference/google-cloud-bigquery-data_transfer-v1/latest) +for class and method documentation. + +See also the [Product Documentation](https://cloud.google.com/bigquery/transfer) +for general usage information. + +## Enabling Logging + +To enable logging for this library, set the logger for the underlying [gRPC](https://github.com/grpc/grpc/tree/master/src/ruby) library. +The logger that you set may be a Ruby stdlib [`Logger`](https://ruby-doc.org/current/stdlibs/logger/Logger.html) as shown below, +or a [`Google::Cloud::Logging::Logger`](https://cloud.google.com/ruby/docs/reference/google-cloud-logging/latest) +that will write logs to [Cloud Logging](https://cloud.google.com/logging/). See [grpc/logconfig.rb](https://github.com/grpc/grpc/blob/master/src/ruby/lib/grpc/logconfig.rb) +and the gRPC [spec_helper.rb](https://github.com/grpc/grpc/blob/master/src/ruby/spec/spec_helper.rb) for additional information. + +Configuring a Ruby stdlib logger: + +```ruby +require "logger" + +module MyLogger + LOGGER = Logger.new $stderr, level: Logger::WARN + def logger + LOGGER + end +end + +# Define a gRPC module-level logger method before grpc/logconfig.rb loads. +module GRPC + extend MyLogger +end +``` + + +## Google Cloud Samples + +To browse ready-to-use code samples, check [Google Cloud Samples](https://cloud.google.com/docs/samples). + +## Supported Ruby Versions + +This library is supported on Ruby 2.7+.
+ +Google provides official support for Ruby versions that are actively supported +by Ruby Core—that is, Ruby versions that are either in normal maintenance or +in security maintenance, and not end of life. Older versions of Ruby _may_ +still work, but are unsupported and not recommended. See +https://www.ruby-lang.org/en/downloads/branches/ for details about the Ruby +support schedule. + +## Which client should I use? + +Most modern Ruby client libraries for Google APIs come in two flavors: the main +client library with a name such as `google-cloud-bigquery-data_transfer`, +and lower-level _versioned_ client libraries with names such as +`google-cloud-bigquery-data_transfer-v1`. +_In most cases, you should install the main client._ + +### What's the difference between the main client and a versioned client? + +A _versioned client_ provides a basic set of data types and client classes for +a _single version_ of a specific service. (That is, for a service with multiple +versions, there might be a separate versioned client for each service version.) +Most versioned clients are written and maintained by a code generator. + +The _main client_ is designed to provide you with the _recommended_ client +interfaces for the service. There will be only one main client for any given +service, even a service with multiple versions. The main client includes +factory methods for constructing the client objects we recommend for most +users. In some cases, those will be classes provided by an underlying versioned +client; in other cases, they will be handwritten higher-level client objects +with additional capabilities, convenience methods, or best practices built in. +Generally, the main client will default to a recommended service version, +although in some cases you can override this if you need to talk to a specific +service version. + +### Why would I want to use the main client? + +We recommend that most users install the main client gem for a service. You can +identify this gem as the one _without_ a version in its name, e.g. +`google-cloud-bigquery-data_transfer`. +The main client is recommended because it will embody the best practices for +accessing the service, and may also provide more convenient interfaces or +tighter integration into frameworks and third-party libraries. In addition, the +documentation and samples published by Google will generally demonstrate use of +the main client. (A short construction sketch appears at the end of this README.) + +### Why would I want to use a versioned client? + +You can use a versioned client if you are content with a possibly lower-level +class interface, you explicitly want to avoid features provided by the main +client, or you want to access a specific service version not covered by the +main client. You can identify versioned client gems because the service version +is part of the name, e.g. `google-cloud-bigquery-data_transfer-v1`. + +### What about the google-apis- clients? + +Client library gems with names that begin with `google-apis-` are based on an +older code generation technology. They talk to a REST/JSON backend (whereas +most modern clients talk to a [gRPC](https://grpc.io/) backend) and they may +not offer the same performance, features, and ease of use provided by more +modern clients. + +The `google-apis-` clients have wide coverage across Google services, so you +might need to use one if there is no modern client available for the service. +However, if a modern client is available, we generally recommend it over the +older `google-apis-` clients.
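+
+### Example: constructing a client through the main gem
+
+The sketch below is illustrative only: it assumes the main
+`google-cloud-bigquery-data_transfer` gem is installed, and the
+`data_transfer_service` factory shown follows the standard wrapper-gem
+pattern rather than anything defined in this versioned gem.
+
+```ruby
+require "google/cloud/bigquery/data_transfer"
+
+# The wrapper gem's factory returns a client from an underlying versioned
+# gem, defaulting to the recommended service version.
+client = Google::Cloud::Bigquery::DataTransfer.data_transfer_service
+
+# Pin a specific service version explicitly if you need one.
+client = Google::Cloud::Bigquery::DataTransfer.data_transfer_service version: :v1
+```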
diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/Rakefile b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/Rakefile new file mode 100644 index 000000000000..92a8296dfacc --- /dev/null +++ b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/Rakefile @@ -0,0 +1,168 @@ +# frozen_string_literal: true + +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Auto-generated by gapic-generator-ruby. DO NOT EDIT! + +require "bundler/setup" +require "bundler/gem_tasks" + +require "rubocop/rake_task" +RuboCop::RakeTask.new + +require "rake/testtask" +desc "Run tests." +Rake::TestTask.new do |t| + t.libs << "test" + t.test_files = FileList["test/**/*_test.rb"] + t.warning = false +end + +desc "Runs the smoke tests." +Rake::TestTask.new :smoke_test do |t| + t.test_files = FileList["acceptance/**/*smoke_test.rb"] + t.warning = false +end + +# Acceptance tests +desc "Run the google-cloud-bigquery-data_transfer-v1 acceptance tests." +task :acceptance, :project, :keyfile do |t, args| + project = args[:project] + project ||= + ENV["DATA_TRANSFER_TEST_PROJECT"] || + ENV["GCLOUD_TEST_PROJECT"] + keyfile = args[:keyfile] + keyfile ||= + ENV["DATA_TRANSFER_TEST_KEYFILE"] || + ENV["GCLOUD_TEST_KEYFILE"] + if keyfile + keyfile = File.read keyfile + else + keyfile ||= + ENV["DATA_TRANSFER_TEST_KEYFILE_JSON"] || + ENV["GCLOUD_TEST_KEYFILE_JSON"] + end + if project.nil? || keyfile.nil? + fail "You must provide a project and keyfile. e.g. rake acceptance[test123, /path/to/keyfile.json] or DATA_TRANSFER_TEST_PROJECT=test123 DATA_TRANSFER_TEST_KEYFILE=/path/to/keyfile.json rake acceptance" + end + require "google/cloud/bigquery/data_transfer/v1/data_transfer_service/credentials" + ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Credentials.env_vars.each do |path| + ENV[path] = nil + end + ENV["DATA_TRANSFER_PROJECT"] = project + ENV["DATA_TRANSFER_TEST_PROJECT"] = project + ENV["DATA_TRANSFER_KEYFILE_JSON"] = keyfile + + Rake::Task["acceptance:run"].invoke +end + +namespace :acceptance do + task :run do + if File.directory? "acceptance" + Rake::Task[:smoke_test].invoke + else + puts "The google-cloud-bigquery-data_transfer-v1 gem has no acceptance tests." + end + end + + desc "Run acceptance cleanup." + task :cleanup do + end +end + +task :samples do + Rake::Task["samples:latest"].invoke +end + +namespace :samples do + task :latest do + if File.directory? "samples" + Dir.chdir "samples" do + Bundler.with_clean_env do + ENV["GOOGLE_CLOUD_SAMPLES_TEST"] = "not_master" + sh "bundle update" + sh "bundle exec rake test" + end + end + else + puts "The google-cloud-bigquery-data_transfer-v1 gem has no samples to test." + end + end + + task :master do + if File.directory? "samples" + Dir.chdir "samples" do + Bundler.with_clean_env do + ENV["GOOGLE_CLOUD_SAMPLES_TEST"] = "master" + sh "bundle update" + sh "bundle exec rake test" + end + end + else + puts "The google-cloud-bigquery-data_transfer-v1 gem has no samples to test." 
+ end + end +end + +require "yard" +require "yard/rake/yardoc_task" +YARD::Rake::YardocTask.new do |y| +end + +desc "Run yard-doctest example tests." +task :doctest do + puts "The google-cloud-bigquery-data_transfer-v1 gem does not have doctest tests." +end + +desc "Run the CI build" +task :ci do + header "BUILDING google-cloud-bigquery-data_transfer-v1" + header "google-cloud-bigquery-data_transfer-v1 rubocop", "*" + Rake::Task[:rubocop].invoke + header "google-cloud-bigquery-data_transfer-v1 yard", "*" + Rake::Task[:yard].invoke + header "google-cloud-bigquery-data_transfer-v1 test", "*" + Rake::Task[:test].invoke +end + +namespace :ci do + desc "Run the CI build, with smoke tests." + task :smoke_test do + Rake::Task[:ci].invoke + header "google-cloud-bigquery-data_transfer-v1 smoke_test", "*" + Rake::Task[:smoke_test].invoke + end + desc "Run the CI build, with acceptance tests." + task :acceptance do + Rake::Task[:ci].invoke + header "google-cloud-bigquery-data_transfer-v1 acceptance", "*" + Rake::Task[:acceptance].invoke + end + task :a do + # This is a handy shortcut to save typing + Rake::Task["ci:acceptance"].invoke + end +end + +task default: :test + +def header str, token = "#" + line_length = str.length + 8 + puts "" + puts token * line_length + puts "#{token * 3} #{str} #{token * 3}" + puts token * line_length + puts "" +end diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/gapic_metadata.json b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/gapic_metadata.json new file mode 100644 index 000000000000..7ae4ebcd10a5 --- /dev/null +++ b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/gapic_metadata.json @@ -0,0 +1,98 @@ +{ + "schema": "1.0", + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "ruby", + "protoPackage": "google.cloud.bigquery.datatransfer.v1", + "libraryPackage": "::Google::Cloud::Bigquery::DataTransfer::V1", + "services": { + "DataTransferService": { + "clients": { + "grpc": { + "libraryClient": "::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client", + "rpcs": { + "GetDataSource": { + "methods": [ + "get_data_source" + ] + }, + "ListDataSources": { + "methods": [ + "list_data_sources" + ] + }, + "CreateTransferConfig": { + "methods": [ + "create_transfer_config" + ] + }, + "UpdateTransferConfig": { + "methods": [ + "update_transfer_config" + ] + }, + "DeleteTransferConfig": { + "methods": [ + "delete_transfer_config" + ] + }, + "GetTransferConfig": { + "methods": [ + "get_transfer_config" + ] + }, + "ListTransferConfigs": { + "methods": [ + "list_transfer_configs" + ] + }, + "ScheduleTransferRuns": { + "methods": [ + "schedule_transfer_runs" + ] + }, + "StartManualTransferRuns": { + "methods": [ + "start_manual_transfer_runs" + ] + }, + "GetTransferRun": { + "methods": [ + "get_transfer_run" + ] + }, + "DeleteTransferRun": { + "methods": [ + "delete_transfer_run" + ] + }, + "ListTransferRuns": { + "methods": [ + "list_transfer_runs" + ] + }, + "ListTransferLogs": { + "methods": [ + "list_transfer_logs" + ] + }, + "CheckValidCreds": { + "methods": [ + "check_valid_creds" + ] + }, + "EnrollDataSources": { + "methods": [ + "enroll_data_sources" + ] + }, + "UnenrollDataSources": { + "methods": [ + "unenroll_data_sources" + ] + } + } + } + } + } + } +} diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/google-cloud-bigquery-data_transfer-v1.gemspec 
b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/google-cloud-bigquery-data_transfer-v1.gemspec new file mode 100644 index 000000000000..8d124f6d7e84 --- /dev/null +++ b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/google-cloud-bigquery-data_transfer-v1.gemspec @@ -0,0 +1,29 @@ +# -*- ruby -*- +# encoding: utf-8 + +require File.expand_path("lib/google/cloud/bigquery/data_transfer/v1/version", __dir__) + +Gem::Specification.new do |gem| + gem.name = "google-cloud-bigquery-data_transfer-v1" + gem.version = Google::Cloud::Bigquery::DataTransfer::V1::VERSION + + gem.authors = ["Google LLC"] + gem.email = "googleapis-packages@google.com" + gem.description = "Schedules queries and transfers external data from SaaS applications to Google BigQuery on a regular basis. Note that google-cloud-bigquery-data_transfer-v1 is a version-specific client library. For most uses, we recommend installing the main client library google-cloud-bigquery-data_transfer instead. See the readme for more details." + gem.summary = "Schedule queries or transfer external data from SaaS applications to Google BigQuery on a regular basis." + gem.homepage = "https://github.com/googleapis/google-cloud-ruby" + gem.license = "Apache-2.0" + + gem.platform = Gem::Platform::RUBY + + gem.files = `git ls-files -- lib/*`.split("\n") + + `git ls-files -- proto_docs/*`.split("\n") + + ["README.md", "LICENSE.md", "AUTHENTICATION.md", ".yardopts"] + gem.require_paths = ["lib"] + + gem.required_ruby_version = ">= 2.7" + + gem.add_dependency "gapic-common", ">= 0.21.1", "< 2.a" + gem.add_dependency "google-cloud-errors", "~> 1.0" + gem.add_dependency "google-cloud-location", ">= 0.7", "< 2.a" +end diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google-cloud-bigquery-data_transfer-v1.rb b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google-cloud-bigquery-data_transfer-v1.rb new file mode 100644 index 000000000000..af6e59922b17 --- /dev/null +++ b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google-cloud-bigquery-data_transfer-v1.rb @@ -0,0 +1,21 @@ +# frozen_string_literal: true + +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Auto-generated by gapic-generator-ruby. DO NOT EDIT! + +# This gem does not autoload during Bundler.require. 
To load this gem, +# issue explicit require statements for the packages desired, e.g.: +# require "google/cloud/bigquery/data_transfer/v1" diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/data_transfer/v1.rb b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/data_transfer/v1.rb new file mode 100644 index 000000000000..c5f4d18ce0af --- /dev/null +++ b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/data_transfer/v1.rb @@ -0,0 +1,47 @@ +# frozen_string_literal: true + +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Auto-generated by gapic-generator-ruby. DO NOT EDIT! + +require "google/cloud/bigquery/data_transfer/v1/data_transfer_service" +require "google/cloud/bigquery/data_transfer/v1/version" + +module Google + module Cloud + module Bigquery + module DataTransfer + ## + # API client module. + # + # @example Load this package, including all its services, and instantiate a gRPC client + # + # require "google/cloud/bigquery/data_transfer/v1" + # client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new + # + # @example Load this package, including all its services, and instantiate a REST client + # + # require "google/cloud/bigquery/data_transfer/v1" + # client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::Client.new + # + module V1 + end + end + end + end +end + +helper_path = ::File.join __dir__, "v1", "_helpers.rb" +require "google/cloud/bigquery/data_transfer/v1/_helpers" if ::File.file? helper_path diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/data_transfer/v1/bindings_override.rb b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/data_transfer/v1/bindings_override.rb new file mode 100644 index 000000000000..56c4bf1ffbfb --- /dev/null +++ b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/data_transfer/v1/bindings_override.rb @@ -0,0 +1,104 @@ +# frozen_string_literal: true + +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Auto-generated by gapic-generator-ruby. DO NOT EDIT! 
+ +require "gapic/config" + +module Google + module Cloud + module Bigquery + module DataTransfer + ## + # @example Loading just the REST part of this package, including all its services, and instantiating a REST client + # + # require "google/cloud/bigquery/data_transfer/v1/rest" + # client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::Client.new + # + module V1 + ## + # @private + # Initialize the mixin bindings configuration + # + def self.configure + @configure ||= begin + namespace = ["Google", "Cloud", "Bigquery", "DataTransfer"] + parent_config = while namespace.any? + parent_name = namespace.join "::" + parent_const = const_get parent_name + break parent_const.configure if parent_const.respond_to? :configure + namespace.pop + end + + default_config = Configuration.new parent_config + default_config.bindings_override["google.cloud.location.Locations.GetLocation"] = [ + Gapic::Rest::GrpcTranscoder::HttpBinding.create_with_validation( + uri_method: :get, + uri_template: "/v1/{name}", + matches: [ + ["name", %r{^projects/[^/]+/locations/[^/]+/?$}, false] + ], + body: nil + ) + ] + default_config.bindings_override["google.cloud.location.Locations.ListLocations"] = [ + + Gapic::Rest::GrpcTranscoder::HttpBinding.create_with_validation( + uri_method: :get, + uri_template: "/v1/{name}/locations", + matches: [ + ["name", %r{^projects/[^/]+/?$}, false] + ], + body: nil + ) + ] + default_config + end + yield @configure if block_given? + @configure + end + + ## + # @private + # Configuration class for the google.cloud.bigquery.datatransfer.v1 package. + # + # This class contains common configuration for all services + # of the google.cloud.bigquery.datatransfer.v1 package. + # + # This configuration is for internal use of the client library classes, + # and it is not intended that the end-users will read or change it. + # + class Configuration + extend ::Gapic::Config + + # @private + # Overrides for http bindings for the RPC of the mixins for this package. + # Services in this package should use these when creating clients for the mixin services. + # @return [::Hash{::Symbol=>::Array<::Gapic::Rest::GrpcTranscoder::HttpBinding>}] + config_attr :bindings_override, {}, ::Hash, nil + + # @private + def initialize parent_config = nil + @parent_config = parent_config unless parent_config.nil? + + yield self if block_given? + end + end + end + end + end + end +end diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/data_transfer/v1/data_transfer_service.rb b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/data_transfer/v1/data_transfer_service.rb new file mode 100644 index 000000000000..0174fd3ee9b3 --- /dev/null +++ b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/data_transfer/v1/data_transfer_service.rb @@ -0,0 +1,57 @@ +# frozen_string_literal: true + +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +# Auto-generated by gapic-generator-ruby. DO NOT EDIT! + +require "gapic/common" +require "gapic/config" +require "gapic/config/method" + +require "google/cloud/bigquery/data_transfer/v1/version" + +require "google/cloud/bigquery/data_transfer/v1/data_transfer_service/credentials" +require "google/cloud/bigquery/data_transfer/v1/data_transfer_service/paths" +require "google/cloud/bigquery/data_transfer/v1/data_transfer_service/client" +require "google/cloud/bigquery/data_transfer/v1/data_transfer_service/rest" + +module Google + module Cloud + module Bigquery + module DataTransfer + module V1 + ## + # This API allows users to manage their data transfers into BigQuery. + # + # @example Load this service and instantiate a gRPC client + # + # require "google/cloud/bigquery/data_transfer/v1/data_transfer_service" + # client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new + # + # @example Load this service and instantiate a REST client + # + # require "google/cloud/bigquery/data_transfer/v1/data_transfer_service/rest" + # client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::Client.new + # + module DataTransferService + end + end + end + end + end +end + +helper_path = ::File.join __dir__, "data_transfer_service", "helpers.rb" +require "google/cloud/bigquery/data_transfer/v1/data_transfer_service/helpers" if ::File.file? helper_path diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/data_transfer/v1/data_transfer_service/client.rb b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/data_transfer/v1/data_transfer_service/client.rb new file mode 100644 index 000000000000..b0c00b138ba2 --- /dev/null +++ b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/data_transfer/v1/data_transfer_service/client.rb @@ -0,0 +1,2127 @@ +# frozen_string_literal: true + +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Auto-generated by gapic-generator-ruby. DO NOT EDIT! + +require "google/cloud/errors" +require "google/cloud/bigquery/datatransfer/v1/datatransfer_pb" +require "google/cloud/location" + +module Google + module Cloud + module Bigquery + module DataTransfer + module V1 + module DataTransferService + ## + # Client for the DataTransferService service. + # + # This API allows users to manage their data transfers into BigQuery. + # + class Client + # @private + API_VERSION = "" + + # @private + DEFAULT_ENDPOINT_TEMPLATE = "bigquerydatatransfer.$UNIVERSE_DOMAIN$" + + include Paths + + # @private + attr_reader :data_transfer_service_stub + + ## + # Configure the DataTransferService Client class. + # + # See {::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client::Configuration} + # for a description of the configuration fields. 
+ # + # @example + # + # # Modify the configuration for all DataTransferService clients + # ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.configure do |config| + # config.timeout = 10.0 + # end + # + # @yield [config] Configure the Client client. + # @yieldparam config [Client::Configuration] + # + # @return [Client::Configuration] + # + def self.configure + @configure ||= begin + namespace = ["Google", "Cloud", "Bigquery", "DataTransfer", "V1"] + parent_config = while namespace.any? + parent_name = namespace.join "::" + parent_const = const_get parent_name + break parent_const.configure if parent_const.respond_to? :configure + namespace.pop + end + default_config = Client::Configuration.new parent_config + + default_config.rpcs.get_data_source.timeout = 20.0 + default_config.rpcs.get_data_source.retry_policy = { + initial_delay: 0.1, max_delay: 60.0, multiplier: 1.3, retry_codes: [14, 4] + } + + default_config.rpcs.list_data_sources.timeout = 20.0 + default_config.rpcs.list_data_sources.retry_policy = { + initial_delay: 0.1, max_delay: 60.0, multiplier: 1.3, retry_codes: [14, 4] + } + + default_config.rpcs.create_transfer_config.timeout = 30.0 + + default_config.rpcs.update_transfer_config.timeout = 30.0 + + default_config.rpcs.delete_transfer_config.timeout = 20.0 + default_config.rpcs.delete_transfer_config.retry_policy = { + initial_delay: 0.1, max_delay: 60.0, multiplier: 1.3, retry_codes: [14, 4] + } + + default_config.rpcs.get_transfer_config.timeout = 20.0 + default_config.rpcs.get_transfer_config.retry_policy = { + initial_delay: 0.1, max_delay: 60.0, multiplier: 1.3, retry_codes: [14, 4] + } + + default_config.rpcs.list_transfer_configs.timeout = 20.0 + default_config.rpcs.list_transfer_configs.retry_policy = { + initial_delay: 0.1, max_delay: 60.0, multiplier: 1.3, retry_codes: [14, 4] + } + + default_config.rpcs.schedule_transfer_runs.timeout = 30.0 + + default_config.rpcs.get_transfer_run.timeout = 20.0 + default_config.rpcs.get_transfer_run.retry_policy = { + initial_delay: 0.1, max_delay: 60.0, multiplier: 1.3, retry_codes: [14, 4] + } + + default_config.rpcs.delete_transfer_run.timeout = 20.0 + default_config.rpcs.delete_transfer_run.retry_policy = { + initial_delay: 0.1, max_delay: 60.0, multiplier: 1.3, retry_codes: [14, 4] + } + + default_config.rpcs.list_transfer_runs.timeout = 20.0 + default_config.rpcs.list_transfer_runs.retry_policy = { + initial_delay: 0.1, max_delay: 60.0, multiplier: 1.3, retry_codes: [14, 4] + } + + default_config.rpcs.list_transfer_logs.timeout = 20.0 + default_config.rpcs.list_transfer_logs.retry_policy = { + initial_delay: 0.1, max_delay: 60.0, multiplier: 1.3, retry_codes: [14, 4] + } + + default_config.rpcs.check_valid_creds.timeout = 20.0 + default_config.rpcs.check_valid_creds.retry_policy = { + initial_delay: 0.1, max_delay: 60.0, multiplier: 1.3, retry_codes: [14, 4] + } + + default_config + end + yield @configure if block_given? + @configure + end + + ## + # Configure the DataTransferService Client instance. + # + # The configuration is set to the derived mode, meaning that values can be changed, + # but structural changes (adding new fields, etc.) are not allowed. Structural changes + # should be made on {Client.configure}. + # + # See {::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client::Configuration} + # for a description of the configuration fields. + # + # @yield [config] Configure the Client client. 
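For reference, the `retry_codes` in the defaults above are numeric gRPC status codes: 14 is UNAVAILABLE and 4 is DEADLINE_EXCEEDED, retried with exponential backoff (0.1 s initial delay, multiplier 1.3, capped at 60 s). A sketch of overriding a single RPC's policy class-wide, before any clients are created:

    require "google/cloud/bigquery/data_transfer/v1"

    Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.configure do |config|
      # Retry only UNAVAILABLE (14), with a slower backoff than the default.
      config.rpcs.get_data_source.retry_policy = {
        initial_delay: 0.5, max_delay: 30.0, multiplier: 2.0, retry_codes: [14]
      }
    end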
+ # @yieldparam config [Client::Configuration] + # + # @return [Client::Configuration] + # + def configure + yield @config if block_given? + @config + end + + ## + # The effective universe domain + # + # @return [String] + # + def universe_domain + @data_transfer_service_stub.universe_domain + end + + ## + # Create a new DataTransferService client object. + # + # @example + # + # # Create a client using the default configuration + # client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new + # + # # Create a client using a custom configuration + # client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new do |config| + # config.timeout = 10.0 + # end + # + # @yield [config] Configure the DataTransferService client. + # @yieldparam config [Client::Configuration] + # + def initialize + # These require statements are intentionally placed here to initialize + # the gRPC module only when it's required. + # See https://github.com/googleapis/toolkit/issues/446 + require "gapic/grpc" + require "google/cloud/bigquery/datatransfer/v1/datatransfer_services_pb" + + # Create the configuration object + @config = Configuration.new Client.configure + + # Yield the configuration if needed + yield @config if block_given? + + # Create credentials + credentials = @config.credentials + # Use self-signed JWT if the endpoint is unchanged from default, + # but only if the default endpoint does not have a region prefix. + enable_self_signed_jwt = @config.endpoint.nil? || + (@config.endpoint == Configuration::DEFAULT_ENDPOINT && + !@config.endpoint.split(".").first.include?("-")) + credentials ||= Credentials.default scope: @config.scope, + enable_self_signed_jwt: enable_self_signed_jwt + if credentials.is_a?(::String) || credentials.is_a?(::Hash) + credentials = Credentials.new credentials, scope: @config.scope + end + @quota_project_id = @config.quota_project + @quota_project_id ||= credentials.quota_project_id if credentials.respond_to? :quota_project_id + + @data_transfer_service_stub = ::Gapic::ServiceStub.new( + ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Stub, + credentials: credentials, + endpoint: @config.endpoint, + endpoint_template: DEFAULT_ENDPOINT_TEMPLATE, + universe_domain: @config.universe_domain, + channel_args: @config.channel_args, + interceptors: @config.interceptors, + channel_pool_config: @config.channel_pool + ) + + @location_client = Google::Cloud::Location::Locations::Client.new do |config| + config.credentials = credentials + config.quota_project = @quota_project_id + config.endpoint = @data_transfer_service_stub.endpoint + config.universe_domain = @data_transfer_service_stub.universe_domain + end + end + + ## + # Get the associated client for mix-in of the Locations. + # + # @return [Google::Cloud::Location::Locations::Client] + # + attr_reader :location_client + + # Service calls + + ## + # Retrieves a supported data source and returns its settings. + # + # @overload get_data_source(request, options = nil) + # Pass arguments to `get_data_source` via a request object, either of type + # {::Google::Cloud::Bigquery::DataTransfer::V1::GetDataSourceRequest} or an equivalent Hash. + # + # @param request [::Google::Cloud::Bigquery::DataTransfer::V1::GetDataSourceRequest, ::Hash] + # A request object representing the call parameters. Required. To specify no + # parameters, or to keep all the default parameter values, pass an empty Hash. 
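Building on the initializer above: `config.credentials` may be a keyfile path (String), a credentials Hash, or a `Credentials` object, and the block form shown in the `@example` accepts the other client settings as well. A sketch with placeholder values:

    require "google/cloud/bigquery/data_transfer/v1"

    client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new do |config|
      config.credentials = "/path/to/keyfile.json"  # placeholder path
      config.timeout = 30.0
    end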
+ # @param options [::Gapic::CallOptions, ::Hash] + # Overrides the default settings for this call, e.g, timeout, retries, etc. Optional. + # + # @overload get_data_source(name: nil) + # Pass arguments to `get_data_source` via keyword arguments. Note that at + # least one keyword argument is required. To specify no parameters, or to keep all + # the default parameter values, pass an empty Hash as a request object (see above). + # + # @param name [::String] + # Required. The field will contain name of the resource requested, for + # example: `projects/{project_id}/dataSources/{data_source_id}` or + # `projects/{project_id}/locations/{location_id}/dataSources/{data_source_id}` + # + # @yield [response, operation] Access the result along with the RPC operation + # @yieldparam response [::Google::Cloud::Bigquery::DataTransfer::V1::DataSource] + # @yieldparam operation [::GRPC::ActiveCall::Operation] + # + # @return [::Google::Cloud::Bigquery::DataTransfer::V1::DataSource] + # + # @raise [::Google::Cloud::Error] if the RPC is aborted. + # + # @example Basic example + # require "google/cloud/bigquery/data_transfer/v1" + # + # # Create a client object. The client can be reused for multiple calls. + # client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new + # + # # Create a request. To set request fields, pass in keyword arguments. + # request = Google::Cloud::Bigquery::DataTransfer::V1::GetDataSourceRequest.new + # + # # Call the get_data_source method. + # result = client.get_data_source request + # + # # The returned object is of type Google::Cloud::Bigquery::DataTransfer::V1::DataSource. + # p result + # + def get_data_source request, options = nil + raise ::ArgumentError, "request must be provided" if request.nil? + + request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigquery::DataTransfer::V1::GetDataSourceRequest + + # Converts hash and nil to an options object + options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h + + # Customize the options with defaults + metadata = @config.rpcs.get_data_source.metadata.to_h + + # Set x-goog-api-client, x-goog-user-project and x-goog-api-version headers + metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \ + lib_name: @config.lib_name, lib_version: @config.lib_version, + gapic_version: ::Google::Cloud::Bigquery::DataTransfer::V1::VERSION + metadata[:"x-goog-api-version"] = API_VERSION unless API_VERSION.empty? + metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id + + header_params = {} + if request.name + header_params["name"] = request.name + end + + request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&") + metadata[:"x-goog-request-params"] ||= request_params_header + + options.apply_defaults timeout: @config.rpcs.get_data_source.timeout, + metadata: metadata, + retry_policy: @config.rpcs.get_data_source.retry_policy + + options.apply_defaults timeout: @config.timeout, + metadata: @config.metadata, + retry_policy: @config.retry_policy + + @data_transfer_service_stub.call_rpc :get_data_source, request, options: options do |response, operation| + yield response, operation if block_given? + return response + end + rescue ::GRPC::BadStatus => e + raise ::Google::Cloud::Error.from_error(e) + end + + ## + # Lists supported data sources and returns their settings. 
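As a complement to the request-object `@example` above, `get_data_source` can also be called with keyword arguments. A sketch using a placeholder project id ("my-project") and the `scheduled_query` data source:

    require "google/cloud/bigquery/data_transfer/v1"

    client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new

    begin
      source = client.get_data_source name: "projects/my-project/dataSources/scheduled_query"
      puts source.display_name
    rescue Google::Cloud::Error => e
      # Per the @raise tag above, gRPC failures surface as Google::Cloud::Error.
      puts "RPC failed: #{e.message}"
    end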
+ # + # @overload list_data_sources(request, options = nil) + # Pass arguments to `list_data_sources` via a request object, either of type + # {::Google::Cloud::Bigquery::DataTransfer::V1::ListDataSourcesRequest} or an equivalent Hash. + # + # @param request [::Google::Cloud::Bigquery::DataTransfer::V1::ListDataSourcesRequest, ::Hash] + # A request object representing the call parameters. Required. To specify no + # parameters, or to keep all the default parameter values, pass an empty Hash. + # @param options [::Gapic::CallOptions, ::Hash] + # Overrides the default settings for this call, e.g, timeout, retries, etc. Optional. + # + # @overload list_data_sources(parent: nil, page_token: nil, page_size: nil) + # Pass arguments to `list_data_sources` via keyword arguments. Note that at + # least one keyword argument is required. To specify no parameters, or to keep all + # the default parameter values, pass an empty Hash as a request object (see above). + # + # @param parent [::String] + # Required. The BigQuery project id for which data sources should be + # returned. Must be in the form: `projects/{project_id}` or + # `projects/{project_id}/locations/{location_id}` + # @param page_token [::String] + # Pagination token, which can be used to request a specific page + # of `ListDataSourcesRequest` list results. For multiple-page + # results, `ListDataSourcesResponse` outputs + # a `next_page` token, which can be used as the + # `page_token` value to request the next page of list results. + # @param page_size [::Integer] + # Page size. The default page size is the maximum value of 1000 results. + # + # @yield [response, operation] Access the result along with the RPC operation + # @yieldparam response [::Gapic::PagedEnumerable<::Google::Cloud::Bigquery::DataTransfer::V1::DataSource>] + # @yieldparam operation [::GRPC::ActiveCall::Operation] + # + # @return [::Gapic::PagedEnumerable<::Google::Cloud::Bigquery::DataTransfer::V1::DataSource>] + # + # @raise [::Google::Cloud::Error] if the RPC is aborted. + # + # @example Basic example + # require "google/cloud/bigquery/data_transfer/v1" + # + # # Create a client object. The client can be reused for multiple calls. + # client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new + # + # # Create a request. To set request fields, pass in keyword arguments. + # request = Google::Cloud::Bigquery::DataTransfer::V1::ListDataSourcesRequest.new + # + # # Call the list_data_sources method. + # result = client.list_data_sources request + # + # # The returned object is of type Gapic::PagedEnumerable. You can iterate + # # over elements, and API calls will be issued to fetch pages as needed. + # result.each do |item| + # # Each element is of type ::Google::Cloud::Bigquery::DataTransfer::V1::DataSource. + # p item + # end + # + def list_data_sources request, options = nil + raise ::ArgumentError, "request must be provided" if request.nil? + + request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigquery::DataTransfer::V1::ListDataSourcesRequest + + # Converts hash and nil to an options object + options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? 
:to_h + + # Customize the options with defaults + metadata = @config.rpcs.list_data_sources.metadata.to_h + + # Set x-goog-api-client, x-goog-user-project and x-goog-api-version headers + metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \ + lib_name: @config.lib_name, lib_version: @config.lib_version, + gapic_version: ::Google::Cloud::Bigquery::DataTransfer::V1::VERSION + metadata[:"x-goog-api-version"] = API_VERSION unless API_VERSION.empty? + metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id + + header_params = {} + if request.parent + header_params["parent"] = request.parent + end + + request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&") + metadata[:"x-goog-request-params"] ||= request_params_header + + options.apply_defaults timeout: @config.rpcs.list_data_sources.timeout, + metadata: metadata, + retry_policy: @config.rpcs.list_data_sources.retry_policy + + options.apply_defaults timeout: @config.timeout, + metadata: @config.metadata, + retry_policy: @config.retry_policy + + @data_transfer_service_stub.call_rpc :list_data_sources, request, options: options do |response, operation| + response = ::Gapic::PagedEnumerable.new @data_transfer_service_stub, :list_data_sources, request, response, operation, options + yield response, operation if block_given? + return response + end + rescue ::GRPC::BadStatus => e + raise ::Google::Cloud::Error.from_error(e) + end + + ## + # Creates a new data transfer configuration. + # + # @overload create_transfer_config(request, options = nil) + # Pass arguments to `create_transfer_config` via a request object, either of type + # {::Google::Cloud::Bigquery::DataTransfer::V1::CreateTransferConfigRequest} or an equivalent Hash. + # + # @param request [::Google::Cloud::Bigquery::DataTransfer::V1::CreateTransferConfigRequest, ::Hash] + # A request object representing the call parameters. Required. To specify no + # parameters, or to keep all the default parameter values, pass an empty Hash. + # @param options [::Gapic::CallOptions, ::Hash] + # Overrides the default settings for this call, e.g, timeout, retries, etc. Optional. + # + # @overload create_transfer_config(parent: nil, transfer_config: nil, authorization_code: nil, version_info: nil, service_account_name: nil) + # Pass arguments to `create_transfer_config` via keyword arguments. Note that at + # least one keyword argument is required. To specify no parameters, or to keep all + # the default parameter values, pass an empty Hash as a request object (see above). + # + # @param parent [::String] + # Required. The BigQuery project id where the transfer configuration should + # be created. Must be in the format + # projects/\\{project_id}/locations/\\{location_id} or projects/\\{project_id}. If + # specified location and location of the destination bigquery dataset do not + # match - the request will fail. + # @param transfer_config [::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig, ::Hash] + # Required. Data transfer configuration to create. + # @param authorization_code [::String] + # Deprecated: Authorization code was required when + # `transferConfig.dataSourceId` is 'youtube_channel' but it is no longer used + # in any data sources. Use `version_info` instead. + # + # Optional OAuth2 authorization code to use with this transfer configuration. + # This is required only if `transferConfig.dataSourceId` is 'youtube_channel' + # and new credentials are needed, as indicated by `CheckValidCreds`. 
In order + # to obtain authorization_code, make a request to the following URL: + #
+              #     https://bigquery.cloud.google.com/datatransfer/oauthz/auth?redirect_uri=urn:ietf:wg:oauth:2.0:oob&response_type=authorization_code&client_id=client_id&scope=data_source_scopes
+              #
+ # * The client_id is the OAuth client_id of the data source as + # returned by ListDataSources method. + # * data_source_scopes are the scopes returned by ListDataSources + # method. + # + # Note that this should not be set when `service_account_name` is used to + # create the transfer config. + # @param version_info [::String] + # Optional version info. This parameter replaces `authorization_code` which + # is no longer used in any data sources. This is required only if + # `transferConfig.dataSourceId` is 'youtube_channel' *or* new credentials + # are needed, as indicated by `CheckValidCreds`. In order to obtain version + # info, make a request to the following URL: + #
+              #     https://bigquery.cloud.google.com/datatransfer/oauthz/auth?redirect_uri=urn:ietf:wg:oauth:2.0:oob&response_type=version_info&client_id=client_id&scope=data_source_scopes
+              #
+ # * The client_id is the OAuth client_id of the data source as + # returned by ListDataSources method. + # * data_source_scopes are the scopes returned by ListDataSources + # method. + # + # Note that this should not be set when `service_account_name` is used to + # create the transfer config. + # @param service_account_name [::String] + # Optional service account email. If this field is set, the transfer config + # will be created with this service account's credentials. It requires that + # the requesting user calling this API has permissions to act as this service + # account. + # + # Note that not all data sources support service account credentials when + # creating a transfer config. For the latest list of data sources, read about + # [using service + # accounts](https://cloud.google.com/bigquery-transfer/docs/use-service-accounts). + # + # @yield [response, operation] Access the result along with the RPC operation + # @yieldparam response [::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig] + # @yieldparam operation [::GRPC::ActiveCall::Operation] + # + # @return [::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig] + # + # @raise [::Google::Cloud::Error] if the RPC is aborted. + # + # @example Basic example + # require "google/cloud/bigquery/data_transfer/v1" + # + # # Create a client object. The client can be reused for multiple calls. + # client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new + # + # # Create a request. To set request fields, pass in keyword arguments. + # request = Google::Cloud::Bigquery::DataTransfer::V1::CreateTransferConfigRequest.new + # + # # Call the create_transfer_config method. + # result = client.create_transfer_config request + # + # # The returned object is of type Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig. + # p result + # + def create_transfer_config request, options = nil + raise ::ArgumentError, "request must be provided" if request.nil? + + request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigquery::DataTransfer::V1::CreateTransferConfigRequest + + # Converts hash and nil to an options object + options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h + + # Customize the options with defaults + metadata = @config.rpcs.create_transfer_config.metadata.to_h + + # Set x-goog-api-client, x-goog-user-project and x-goog-api-version headers + metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \ + lib_name: @config.lib_name, lib_version: @config.lib_version, + gapic_version: ::Google::Cloud::Bigquery::DataTransfer::V1::VERSION + metadata[:"x-goog-api-version"] = API_VERSION unless API_VERSION.empty? + metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id + + header_params = {} + if request.parent + header_params["parent"] = request.parent + end + + request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&") + metadata[:"x-goog-request-params"] ||= request_params_header + + options.apply_defaults timeout: @config.rpcs.create_transfer_config.timeout, + metadata: metadata, + retry_policy: @config.rpcs.create_transfer_config.retry_policy + + options.apply_defaults timeout: @config.timeout, + metadata: @config.metadata, + retry_policy: @config.retry_policy + + @data_transfer_service_stub.call_rpc :create_transfer_config, request, options: options do |response, operation| + yield response, operation if block_given? 
+ return response + end + rescue ::GRPC::BadStatus => e + raise ::Google::Cloud::Error.from_error(e) + end + + ## + # Updates a data transfer configuration. + # All fields must be set, even if they are not updated. + # + # @overload update_transfer_config(request, options = nil) + # Pass arguments to `update_transfer_config` via a request object, either of type + # {::Google::Cloud::Bigquery::DataTransfer::V1::UpdateTransferConfigRequest} or an equivalent Hash. + # + # @param request [::Google::Cloud::Bigquery::DataTransfer::V1::UpdateTransferConfigRequest, ::Hash] + # A request object representing the call parameters. Required. To specify no + # parameters, or to keep all the default parameter values, pass an empty Hash. + # @param options [::Gapic::CallOptions, ::Hash] + # Overrides the default settings for this call, e.g, timeout, retries, etc. Optional. + # + # @overload update_transfer_config(transfer_config: nil, authorization_code: nil, update_mask: nil, version_info: nil, service_account_name: nil) + # Pass arguments to `update_transfer_config` via keyword arguments. Note that at + # least one keyword argument is required. To specify no parameters, or to keep all + # the default parameter values, pass an empty Hash as a request object (see above). + # + # @param transfer_config [::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig, ::Hash] + # Required. Data transfer configuration to create. + # @param authorization_code [::String] + # Deprecated: Authorization code was required when + # `transferConfig.dataSourceId` is 'youtube_channel' but it is no longer used + # in any data sources. Use `version_info` instead. + # + # Optional OAuth2 authorization code to use with this transfer configuration. + # This is required only if `transferConfig.dataSourceId` is 'youtube_channel' + # and new credentials are needed, as indicated by `CheckValidCreds`. In order + # to obtain authorization_code, make a request to the following URL: + #
+              #     https://bigquery.cloud.google.com/datatransfer/oauthz/auth?redirect_uri=urn:ietf:wg:oauth:2.0:oob&response_type=authorization_code&client_id=client_id&scope=data_source_scopes
+              #
+ # * The client_id is the OAuth client_id of the data source as + # returned by ListDataSources method. + # * data_source_scopes are the scopes returned by ListDataSources + # method. + # + # Note that this should not be set when `service_account_name` is used to + # update the transfer config. + # @param update_mask [::Google::Protobuf::FieldMask, ::Hash] + # Required. Required list of fields to be updated in this request. + # @param version_info [::String] + # Optional version info. This parameter replaces `authorization_code` which + # is no longer used in any data sources. This is required only if + # `transferConfig.dataSourceId` is 'youtube_channel' *or* new credentials + # are needed, as indicated by `CheckValidCreds`. In order to obtain version + # info, make a request to the following URL: + #
+              #     https://bigquery.cloud.google.com/datatransfer/oauthz/auth?redirect_uri=urn:ietf:wg:oauth:2.0:oob&response_type=version_info&client_id=client_id&scope=data_source_scopes
+              #
+ # * The client_id is the OAuth client_id of the data source as + # returned by ListDataSources method. + # * data_source_scopes are the scopes returned by ListDataSources + # method. + # + # Note that this should not be set when `service_account_name` is used to + # update the transfer config. + # @param service_account_name [::String] + # Optional service account email. If this field is set, the transfer config + # will be created with this service account's credentials. It requires that + # the requesting user calling this API has permissions to act as this service + # account. + # + # Note that not all data sources support service account credentials when + # creating a transfer config. For the latest list of data sources, read about + # [using service + # accounts](https://cloud.google.com/bigquery-transfer/docs/use-service-accounts). + # + # @yield [response, operation] Access the result along with the RPC operation + # @yieldparam response [::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig] + # @yieldparam operation [::GRPC::ActiveCall::Operation] + # + # @return [::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig] + # + # @raise [::Google::Cloud::Error] if the RPC is aborted. + # + # @example Basic example + # require "google/cloud/bigquery/data_transfer/v1" + # + # # Create a client object. The client can be reused for multiple calls. + # client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new + # + # # Create a request. To set request fields, pass in keyword arguments. + # request = Google::Cloud::Bigquery::DataTransfer::V1::UpdateTransferConfigRequest.new + # + # # Call the update_transfer_config method. + # result = client.update_transfer_config request + # + # # The returned object is of type Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig. + # p result + # + def update_transfer_config request, options = nil + raise ::ArgumentError, "request must be provided" if request.nil? + + request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigquery::DataTransfer::V1::UpdateTransferConfigRequest + + # Converts hash and nil to an options object + options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h + + # Customize the options with defaults + metadata = @config.rpcs.update_transfer_config.metadata.to_h + + # Set x-goog-api-client, x-goog-user-project and x-goog-api-version headers + metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \ + lib_name: @config.lib_name, lib_version: @config.lib_version, + gapic_version: ::Google::Cloud::Bigquery::DataTransfer::V1::VERSION + metadata[:"x-goog-api-version"] = API_VERSION unless API_VERSION.empty? + metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id + + header_params = {} + if request.transfer_config&.name + header_params["transfer_config.name"] = request.transfer_config.name + end + + request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&") + metadata[:"x-goog-request-params"] ||= request_params_header + + options.apply_defaults timeout: @config.rpcs.update_transfer_config.timeout, + metadata: metadata, + retry_policy: @config.rpcs.update_transfer_config.retry_policy + + options.apply_defaults timeout: @config.timeout, + metadata: @config.metadata, + retry_policy: @config.retry_policy + + @data_transfer_service_stub.call_rpc :update_transfer_config, request, options: options do |response, operation| + yield response, operation if block_given? 
+ return response + end + rescue ::GRPC::BadStatus => e + raise ::Google::Cloud::Error.from_error(e) + end + + ## + # Deletes a data transfer configuration, including any associated transfer + # runs and logs. + # + # @overload delete_transfer_config(request, options = nil) + # Pass arguments to `delete_transfer_config` via a request object, either of type + # {::Google::Cloud::Bigquery::DataTransfer::V1::DeleteTransferConfigRequest} or an equivalent Hash. + # + # @param request [::Google::Cloud::Bigquery::DataTransfer::V1::DeleteTransferConfigRequest, ::Hash] + # A request object representing the call parameters. Required. To specify no + # parameters, or to keep all the default parameter values, pass an empty Hash. + # @param options [::Gapic::CallOptions, ::Hash] + # Overrides the default settings for this call, e.g, timeout, retries, etc. Optional. + # + # @overload delete_transfer_config(name: nil) + # Pass arguments to `delete_transfer_config` via keyword arguments. Note that at + # least one keyword argument is required. To specify no parameters, or to keep all + # the default parameter values, pass an empty Hash as a request object (see above). + # + # @param name [::String] + # Required. The field will contain name of the resource requested, for + # example: `projects/{project_id}/transferConfigs/{config_id}` or + # `projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}` + # + # @yield [response, operation] Access the result along with the RPC operation + # @yieldparam response [::Google::Protobuf::Empty] + # @yieldparam operation [::GRPC::ActiveCall::Operation] + # + # @return [::Google::Protobuf::Empty] + # + # @raise [::Google::Cloud::Error] if the RPC is aborted. + # + # @example Basic example + # require "google/cloud/bigquery/data_transfer/v1" + # + # # Create a client object. The client can be reused for multiple calls. + # client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new + # + # # Create a request. To set request fields, pass in keyword arguments. + # request = Google::Cloud::Bigquery::DataTransfer::V1::DeleteTransferConfigRequest.new + # + # # Call the delete_transfer_config method. + # result = client.delete_transfer_config request + # + # # The returned object is of type Google::Protobuf::Empty. + # p result + # + def delete_transfer_config request, options = nil + raise ::ArgumentError, "request must be provided" if request.nil? + + request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigquery::DataTransfer::V1::DeleteTransferConfigRequest + + # Converts hash and nil to an options object + options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h + + # Customize the options with defaults + metadata = @config.rpcs.delete_transfer_config.metadata.to_h + + # Set x-goog-api-client, x-goog-user-project and x-goog-api-version headers + metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \ + lib_name: @config.lib_name, lib_version: @config.lib_version, + gapic_version: ::Google::Cloud::Bigquery::DataTransfer::V1::VERSION + metadata[:"x-goog-api-version"] = API_VERSION unless API_VERSION.empty? 
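To make the `update_mask` requirement in `update_transfer_config` above concrete, here is a sketch that renames a config while leaving every other field untouched; the resource name is a placeholder:

    require "google/cloud/bigquery/data_transfer/v1"

    client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new

    updated = client.update_transfer_config(
      transfer_config: {
        name:         "projects/my-project/transferConfigs/my-config",  # placeholder
        display_name: "Nightly load (renamed)"
      },
      # Only fields listed in the mask are modified on the server.
      update_mask: { paths: ["display_name"] }
    )
    puts updated.display_name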
+ metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id + + header_params = {} + if request.name + header_params["name"] = request.name + end + + request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&") + metadata[:"x-goog-request-params"] ||= request_params_header + + options.apply_defaults timeout: @config.rpcs.delete_transfer_config.timeout, + metadata: metadata, + retry_policy: @config.rpcs.delete_transfer_config.retry_policy + + options.apply_defaults timeout: @config.timeout, + metadata: @config.metadata, + retry_policy: @config.retry_policy + + @data_transfer_service_stub.call_rpc :delete_transfer_config, request, options: options do |response, operation| + yield response, operation if block_given? + return response + end + rescue ::GRPC::BadStatus => e + raise ::Google::Cloud::Error.from_error(e) + end + + ## + # Returns information about a data transfer config. + # + # @overload get_transfer_config(request, options = nil) + # Pass arguments to `get_transfer_config` via a request object, either of type + # {::Google::Cloud::Bigquery::DataTransfer::V1::GetTransferConfigRequest} or an equivalent Hash. + # + # @param request [::Google::Cloud::Bigquery::DataTransfer::V1::GetTransferConfigRequest, ::Hash] + # A request object representing the call parameters. Required. To specify no + # parameters, or to keep all the default parameter values, pass an empty Hash. + # @param options [::Gapic::CallOptions, ::Hash] + # Overrides the default settings for this call, e.g, timeout, retries, etc. Optional. + # + # @overload get_transfer_config(name: nil) + # Pass arguments to `get_transfer_config` via keyword arguments. Note that at + # least one keyword argument is required. To specify no parameters, or to keep all + # the default parameter values, pass an empty Hash as a request object (see above). + # + # @param name [::String] + # Required. The field will contain name of the resource requested, for + # example: `projects/{project_id}/transferConfigs/{config_id}` or + # `projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}` + # + # @yield [response, operation] Access the result along with the RPC operation + # @yieldparam response [::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig] + # @yieldparam operation [::GRPC::ActiveCall::Operation] + # + # @return [::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig] + # + # @raise [::Google::Cloud::Error] if the RPC is aborted. + # + # @example Basic example + # require "google/cloud/bigquery/data_transfer/v1" + # + # # Create a client object. The client can be reused for multiple calls. + # client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new + # + # # Create a request. To set request fields, pass in keyword arguments. + # request = Google::Cloud::Bigquery::DataTransfer::V1::GetTransferConfigRequest.new + # + # # Call the get_transfer_config method. + # result = client.get_transfer_config request + # + # # The returned object is of type Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig. + # p result + # + def get_transfer_config request, options = nil + raise ::ArgumentError, "request must be provided" if request.nil? + + request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigquery::DataTransfer::V1::GetTransferConfigRequest + + # Converts hash and nil to an options object + options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? 
:to_h + + # Customize the options with defaults + metadata = @config.rpcs.get_transfer_config.metadata.to_h + + # Set x-goog-api-client, x-goog-user-project and x-goog-api-version headers + metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \ + lib_name: @config.lib_name, lib_version: @config.lib_version, + gapic_version: ::Google::Cloud::Bigquery::DataTransfer::V1::VERSION + metadata[:"x-goog-api-version"] = API_VERSION unless API_VERSION.empty? + metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id + + header_params = {} + if request.name + header_params["name"] = request.name + end + + request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&") + metadata[:"x-goog-request-params"] ||= request_params_header + + options.apply_defaults timeout: @config.rpcs.get_transfer_config.timeout, + metadata: metadata, + retry_policy: @config.rpcs.get_transfer_config.retry_policy + + options.apply_defaults timeout: @config.timeout, + metadata: @config.metadata, + retry_policy: @config.retry_policy + + @data_transfer_service_stub.call_rpc :get_transfer_config, request, options: options do |response, operation| + yield response, operation if block_given? + return response + end + rescue ::GRPC::BadStatus => e + raise ::Google::Cloud::Error.from_error(e) + end + + ## + # Returns information about all transfer configs owned by a project in the + # specified location. + # + # @overload list_transfer_configs(request, options = nil) + # Pass arguments to `list_transfer_configs` via a request object, either of type + # {::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferConfigsRequest} or an equivalent Hash. + # + # @param request [::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferConfigsRequest, ::Hash] + # A request object representing the call parameters. Required. To specify no + # parameters, or to keep all the default parameter values, pass an empty Hash. + # @param options [::Gapic::CallOptions, ::Hash] + # Overrides the default settings for this call, e.g, timeout, retries, etc. Optional. + # + # @overload list_transfer_configs(parent: nil, data_source_ids: nil, page_token: nil, page_size: nil) + # Pass arguments to `list_transfer_configs` via keyword arguments. Note that at + # least one keyword argument is required. To specify no parameters, or to keep all + # the default parameter values, pass an empty Hash as a request object (see above). + # + # @param parent [::String] + # Required. The BigQuery project id for which transfer configs + # should be returned: `projects/{project_id}` or + # `projects/{project_id}/locations/{location_id}` + # @param data_source_ids [::Array<::String>] + # When specified, only configurations of requested data sources are returned. + # @param page_token [::String] + # Pagination token, which can be used to request a specific page + # of `ListTransfersRequest` list results. For multiple-page + # results, `ListTransfersResponse` outputs + # a `next_page` token, which can be used as the + # `page_token` value to request the next page of list results. + # @param page_size [::Integer] + # Page size. The default page size is the maximum value of 1000 results. 
+ # + # @yield [response, operation] Access the result along with the RPC operation + # @yieldparam response [::Gapic::PagedEnumerable<::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig>] + # @yieldparam operation [::GRPC::ActiveCall::Operation] + # + # @return [::Gapic::PagedEnumerable<::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig>] + # + # @raise [::Google::Cloud::Error] if the RPC is aborted. + # + # @example Basic example + # require "google/cloud/bigquery/data_transfer/v1" + # + # # Create a client object. The client can be reused for multiple calls. + # client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new + # + # # Create a request. To set request fields, pass in keyword arguments. + # request = Google::Cloud::Bigquery::DataTransfer::V1::ListTransferConfigsRequest.new + # + # # Call the list_transfer_configs method. + # result = client.list_transfer_configs request + # + # # The returned object is of type Gapic::PagedEnumerable. You can iterate + # # over elements, and API calls will be issued to fetch pages as needed. + # result.each do |item| + # # Each element is of type ::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig. + # p item + # end + # + def list_transfer_configs request, options = nil + raise ::ArgumentError, "request must be provided" if request.nil? + + request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferConfigsRequest + + # Converts hash and nil to an options object + options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h + + # Customize the options with defaults + metadata = @config.rpcs.list_transfer_configs.metadata.to_h + + # Set x-goog-api-client, x-goog-user-project and x-goog-api-version headers + metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \ + lib_name: @config.lib_name, lib_version: @config.lib_version, + gapic_version: ::Google::Cloud::Bigquery::DataTransfer::V1::VERSION + metadata[:"x-goog-api-version"] = API_VERSION unless API_VERSION.empty? + metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id + + header_params = {} + if request.parent + header_params["parent"] = request.parent + end + + request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&") + metadata[:"x-goog-request-params"] ||= request_params_header + + options.apply_defaults timeout: @config.rpcs.list_transfer_configs.timeout, + metadata: metadata, + retry_policy: @config.rpcs.list_transfer_configs.retry_policy + + options.apply_defaults timeout: @config.timeout, + metadata: @config.metadata, + retry_policy: @config.retry_policy + + @data_transfer_service_stub.call_rpc :list_transfer_configs, request, options: options do |response, operation| + response = ::Gapic::PagedEnumerable.new @data_transfer_service_stub, :list_transfer_configs, request, response, operation, options + yield response, operation if block_given? + return response + end + rescue ::GRPC::BadStatus => e + raise ::Google::Cloud::Error.from_error(e) + end + + ## + # Creates transfer runs for a time range [start_time, end_time]. + # For each date - or whatever granularity the data source supports - in the + # range, one transfer run is created. + # Note that runs are created per UTC time in the time range. + # DEPRECATED: use StartManualTransferRuns instead. + # + # @deprecated This method is deprecated and may be removed in the next major version update. 
+ # + # @overload schedule_transfer_runs(request, options = nil) + # Pass arguments to `schedule_transfer_runs` via a request object, either of type + # {::Google::Cloud::Bigquery::DataTransfer::V1::ScheduleTransferRunsRequest} or an equivalent Hash. + # + # @param request [::Google::Cloud::Bigquery::DataTransfer::V1::ScheduleTransferRunsRequest, ::Hash] + # A request object representing the call parameters. Required. To specify no + # parameters, or to keep all the default parameter values, pass an empty Hash. + # @param options [::Gapic::CallOptions, ::Hash] + # Overrides the default settings for this call, e.g, timeout, retries, etc. Optional. + # + # @overload schedule_transfer_runs(parent: nil, start_time: nil, end_time: nil) + # Pass arguments to `schedule_transfer_runs` via keyword arguments. Note that at + # least one keyword argument is required. To specify no parameters, or to keep all + # the default parameter values, pass an empty Hash as a request object (see above). + # + # @param parent [::String] + # Required. Transfer configuration name in the form: + # `projects/{project_id}/transferConfigs/{config_id}` or + # `projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}`. + # @param start_time [::Google::Protobuf::Timestamp, ::Hash] + # Required. Start time of the range of transfer runs. For example, + # `"2017-05-25T00:00:00+00:00"`. + # @param end_time [::Google::Protobuf::Timestamp, ::Hash] + # Required. End time of the range of transfer runs. For example, + # `"2017-05-30T00:00:00+00:00"`. + # + # @yield [response, operation] Access the result along with the RPC operation + # @yieldparam response [::Google::Cloud::Bigquery::DataTransfer::V1::ScheduleTransferRunsResponse] + # @yieldparam operation [::GRPC::ActiveCall::Operation] + # + # @return [::Google::Cloud::Bigquery::DataTransfer::V1::ScheduleTransferRunsResponse] + # + # @raise [::Google::Cloud::Error] if the RPC is aborted. + # + # @example Basic example + # require "google/cloud/bigquery/data_transfer/v1" + # + # # Create a client object. The client can be reused for multiple calls. + # client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new + # + # # Create a request. To set request fields, pass in keyword arguments. + # request = Google::Cloud::Bigquery::DataTransfer::V1::ScheduleTransferRunsRequest.new + # + # # Call the schedule_transfer_runs method. + # result = client.schedule_transfer_runs request + # + # # The returned object is of type Google::Cloud::Bigquery::DataTransfer::V1::ScheduleTransferRunsResponse. + # p result + # + def schedule_transfer_runs request, options = nil + raise ::ArgumentError, "request must be provided" if request.nil? + + request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigquery::DataTransfer::V1::ScheduleTransferRunsRequest + + # Converts hash and nil to an options object + options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h + + # Customize the options with defaults + metadata = @config.rpcs.schedule_transfer_runs.metadata.to_h + + # Set x-goog-api-client, x-goog-user-project and x-goog-api-version headers + metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \ + lib_name: @config.lib_name, lib_version: @config.lib_version, + gapic_version: ::Google::Cloud::Bigquery::DataTransfer::V1::VERSION + metadata[:"x-goog-api-version"] = API_VERSION unless API_VERSION.empty? 
+ metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id + + header_params = {} + if request.parent + header_params["parent"] = request.parent + end + + request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&") + metadata[:"x-goog-request-params"] ||= request_params_header + + options.apply_defaults timeout: @config.rpcs.schedule_transfer_runs.timeout, + metadata: metadata, + retry_policy: @config.rpcs.schedule_transfer_runs.retry_policy + + options.apply_defaults timeout: @config.timeout, + metadata: @config.metadata, + retry_policy: @config.retry_policy + + @data_transfer_service_stub.call_rpc :schedule_transfer_runs, request, options: options do |response, operation| + yield response, operation if block_given? + return response + end + rescue ::GRPC::BadStatus => e + raise ::Google::Cloud::Error.from_error(e) + end + + ## + # Start manual transfer runs to be executed now with schedule_time equal to + # current time. The transfer runs can be created for a time range where the + # run_time is between start_time (inclusive) and end_time (exclusive), or for + # a specific run_time. + # + # @overload start_manual_transfer_runs(request, options = nil) + # Pass arguments to `start_manual_transfer_runs` via a request object, either of type + # {::Google::Cloud::Bigquery::DataTransfer::V1::StartManualTransferRunsRequest} or an equivalent Hash. + # + # @param request [::Google::Cloud::Bigquery::DataTransfer::V1::StartManualTransferRunsRequest, ::Hash] + # A request object representing the call parameters. Required. To specify no + # parameters, or to keep all the default parameter values, pass an empty Hash. + # @param options [::Gapic::CallOptions, ::Hash] + # Overrides the default settings for this call, e.g, timeout, retries, etc. Optional. + # + # @overload start_manual_transfer_runs(parent: nil, requested_time_range: nil, requested_run_time: nil) + # Pass arguments to `start_manual_transfer_runs` via keyword arguments. Note that at + # least one keyword argument is required. To specify no parameters, or to keep all + # the default parameter values, pass an empty Hash as a request object (see above). + # + # @param parent [::String] + # Required. Transfer configuration name in the form: + # `projects/{project_id}/transferConfigs/{config_id}` or + # `projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}`. + # @param requested_time_range [::Google::Cloud::Bigquery::DataTransfer::V1::StartManualTransferRunsRequest::TimeRange, ::Hash] + # A time_range start and end timestamp for historical data files or reports + # that are scheduled to be transferred by the scheduled transfer run. + # requested_time_range must be a past time and cannot include future time + # values. + # @param requested_run_time [::Google::Protobuf::Timestamp, ::Hash] + # A run_time timestamp for historical data files or reports + # that are scheduled to be transferred by the scheduled transfer run. + # requested_run_time must be a past time and cannot include future time + # values. + # + # @yield [response, operation] Access the result along with the RPC operation + # @yieldparam response [::Google::Cloud::Bigquery::DataTransfer::V1::StartManualTransferRunsResponse] + # @yieldparam operation [::GRPC::ActiveCall::Operation] + # + # @return [::Google::Cloud::Bigquery::DataTransfer::V1::StartManualTransferRunsResponse] + # + # @raise [::Google::Cloud::Error] if the RPC is aborted. 
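Since `schedule_transfer_runs` is deprecated in favor of `start_manual_transfer_runs`, a migration sketch using `requested_time_range` (ids and dates are placeholders; protobuf timestamps may be passed as `{ seconds: ... }` hashes):

    require "google/cloud/bigquery/data_transfer/v1"

    client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new

    response = client.start_manual_transfer_runs(
      parent: "projects/my-project/transferConfigs/my-config",
      requested_time_range: {
        start_time: { seconds: Time.utc(2024, 5, 1).to_i },  # inclusive
        end_time:   { seconds: Time.utc(2024, 5, 2).to_i }   # exclusive
      }
    )
    response.runs.each { |run| puts run.name }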
+ # + # @example Basic example + # require "google/cloud/bigquery/data_transfer/v1" + # + # # Create a client object. The client can be reused for multiple calls. + # client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new + # + # # Create a request. To set request fields, pass in keyword arguments. + # request = Google::Cloud::Bigquery::DataTransfer::V1::StartManualTransferRunsRequest.new + # + # # Call the start_manual_transfer_runs method. + # result = client.start_manual_transfer_runs request + # + # # The returned object is of type Google::Cloud::Bigquery::DataTransfer::V1::StartManualTransferRunsResponse. + # p result + # + def start_manual_transfer_runs request, options = nil + raise ::ArgumentError, "request must be provided" if request.nil? + + request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigquery::DataTransfer::V1::StartManualTransferRunsRequest + + # Converts hash and nil to an options object + options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h + + # Customize the options with defaults + metadata = @config.rpcs.start_manual_transfer_runs.metadata.to_h + + # Set x-goog-api-client, x-goog-user-project and x-goog-api-version headers + metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \ + lib_name: @config.lib_name, lib_version: @config.lib_version, + gapic_version: ::Google::Cloud::Bigquery::DataTransfer::V1::VERSION + metadata[:"x-goog-api-version"] = API_VERSION unless API_VERSION.empty? + metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id + + header_params = {} + if request.parent + header_params["parent"] = request.parent + end + + request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&") + metadata[:"x-goog-request-params"] ||= request_params_header + + options.apply_defaults timeout: @config.rpcs.start_manual_transfer_runs.timeout, + metadata: metadata, + retry_policy: @config.rpcs.start_manual_transfer_runs.retry_policy + + options.apply_defaults timeout: @config.timeout, + metadata: @config.metadata, + retry_policy: @config.retry_policy + + @data_transfer_service_stub.call_rpc :start_manual_transfer_runs, request, options: options do |response, operation| + yield response, operation if block_given? + return response + end + rescue ::GRPC::BadStatus => e + raise ::Google::Cloud::Error.from_error(e) + end + + ## + # Returns information about the particular transfer run. + # + # @overload get_transfer_run(request, options = nil) + # Pass arguments to `get_transfer_run` via a request object, either of type + # {::Google::Cloud::Bigquery::DataTransfer::V1::GetTransferRunRequest} or an equivalent Hash. + # + # @param request [::Google::Cloud::Bigquery::DataTransfer::V1::GetTransferRunRequest, ::Hash] + # A request object representing the call parameters. Required. To specify no + # parameters, or to keep all the default parameter values, pass an empty Hash. + # @param options [::Gapic::CallOptions, ::Hash] + # Overrides the default settings for this call, e.g, timeout, retries, etc. Optional. + # + # @overload get_transfer_run(name: nil) + # Pass arguments to `get_transfer_run` via keyword arguments. Note that at + # least one keyword argument is required. To specify no parameters, or to keep all + # the default parameter values, pass an empty Hash as a request object (see above). + # + # @param name [::String] + # Required. 
The field will contain name of the resource requested, for + # example: `projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}` + # or + # `projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}/runs/{run_id}` + # + # @yield [response, operation] Access the result along with the RPC operation + # @yieldparam response [::Google::Cloud::Bigquery::DataTransfer::V1::TransferRun] + # @yieldparam operation [::GRPC::ActiveCall::Operation] + # + # @return [::Google::Cloud::Bigquery::DataTransfer::V1::TransferRun] + # + # @raise [::Google::Cloud::Error] if the RPC is aborted. + # + # @example Basic example + # require "google/cloud/bigquery/data_transfer/v1" + # + # # Create a client object. The client can be reused for multiple calls. + # client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new + # + # # Create a request. To set request fields, pass in keyword arguments. + # request = Google::Cloud::Bigquery::DataTransfer::V1::GetTransferRunRequest.new + # + # # Call the get_transfer_run method. + # result = client.get_transfer_run request + # + # # The returned object is of type Google::Cloud::Bigquery::DataTransfer::V1::TransferRun. + # p result + # + def get_transfer_run request, options = nil + raise ::ArgumentError, "request must be provided" if request.nil? + + request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigquery::DataTransfer::V1::GetTransferRunRequest + + # Converts hash and nil to an options object + options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h + + # Customize the options with defaults + metadata = @config.rpcs.get_transfer_run.metadata.to_h + + # Set x-goog-api-client, x-goog-user-project and x-goog-api-version headers + metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \ + lib_name: @config.lib_name, lib_version: @config.lib_version, + gapic_version: ::Google::Cloud::Bigquery::DataTransfer::V1::VERSION + metadata[:"x-goog-api-version"] = API_VERSION unless API_VERSION.empty? + metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id + + header_params = {} + if request.name + header_params["name"] = request.name + end + + request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&") + metadata[:"x-goog-request-params"] ||= request_params_header + + options.apply_defaults timeout: @config.rpcs.get_transfer_run.timeout, + metadata: metadata, + retry_policy: @config.rpcs.get_transfer_run.retry_policy + + options.apply_defaults timeout: @config.timeout, + metadata: @config.metadata, + retry_policy: @config.retry_policy + + @data_transfer_service_stub.call_rpc :get_transfer_run, request, options: options do |response, operation| + yield response, operation if block_given? + return response + end + rescue ::GRPC::BadStatus => e + raise ::Google::Cloud::Error.from_error(e) + end + + ## + # Deletes the specified transfer run. + # + # @overload delete_transfer_run(request, options = nil) + # Pass arguments to `delete_transfer_run` via a request object, either of type + # {::Google::Cloud::Bigquery::DataTransfer::V1::DeleteTransferRunRequest} or an equivalent Hash. + # + # @param request [::Google::Cloud::Bigquery::DataTransfer::V1::DeleteTransferRunRequest, ::Hash] + # A request object representing the call parameters. Required. To specify no + # parameters, or to keep all the default parameter values, pass an empty Hash. 
+ # @param options [::Gapic::CallOptions, ::Hash] + # Overrides the default settings for this call, e.g, timeout, retries, etc. Optional. + # + # @overload delete_transfer_run(name: nil) + # Pass arguments to `delete_transfer_run` via keyword arguments. Note that at + # least one keyword argument is required. To specify no parameters, or to keep all + # the default parameter values, pass an empty Hash as a request object (see above). + # + # @param name [::String] + # Required. The field will contain name of the resource requested, for + # example: `projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}` + # or + # `projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}/runs/{run_id}` + # + # @yield [response, operation] Access the result along with the RPC operation + # @yieldparam response [::Google::Protobuf::Empty] + # @yieldparam operation [::GRPC::ActiveCall::Operation] + # + # @return [::Google::Protobuf::Empty] + # + # @raise [::Google::Cloud::Error] if the RPC is aborted. + # + # @example Basic example + # require "google/cloud/bigquery/data_transfer/v1" + # + # # Create a client object. The client can be reused for multiple calls. + # client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new + # + # # Create a request. To set request fields, pass in keyword arguments. + # request = Google::Cloud::Bigquery::DataTransfer::V1::DeleteTransferRunRequest.new + # + # # Call the delete_transfer_run method. + # result = client.delete_transfer_run request + # + # # The returned object is of type Google::Protobuf::Empty. + # p result + # + def delete_transfer_run request, options = nil + raise ::ArgumentError, "request must be provided" if request.nil? + + request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigquery::DataTransfer::V1::DeleteTransferRunRequest + + # Converts hash and nil to an options object + options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h + + # Customize the options with defaults + metadata = @config.rpcs.delete_transfer_run.metadata.to_h + + # Set x-goog-api-client, x-goog-user-project and x-goog-api-version headers + metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \ + lib_name: @config.lib_name, lib_version: @config.lib_version, + gapic_version: ::Google::Cloud::Bigquery::DataTransfer::V1::VERSION + metadata[:"x-goog-api-version"] = API_VERSION unless API_VERSION.empty? + metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id + + header_params = {} + if request.name + header_params["name"] = request.name + end + + request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&") + metadata[:"x-goog-request-params"] ||= request_params_header + + options.apply_defaults timeout: @config.rpcs.delete_transfer_run.timeout, + metadata: metadata, + retry_policy: @config.rpcs.delete_transfer_run.retry_policy + + options.apply_defaults timeout: @config.timeout, + metadata: @config.metadata, + retry_policy: @config.retry_policy + + @data_transfer_service_stub.call_rpc :delete_transfer_run, request, options: options do |response, operation| + yield response, operation if block_given? + return response + end + rescue ::GRPC::BadStatus => e + raise ::Google::Cloud::Error.from_error(e) + end + + ## + # Returns information about running and completed transfer runs. 
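+ #
+ # A minimal sketch of page-wise iteration (the resource name is
+ # illustrative, assuming the paging helpers in `gapic-common`):
+ #
+ #   runs = client.list_transfer_runs parent: "projects/my-project/transferConfigs/my-config"
+ #   runs.each_page do |page|
+ #     page.each { |run| p run.name }
+ #   end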
+ # + # @overload list_transfer_runs(request, options = nil) + # Pass arguments to `list_transfer_runs` via a request object, either of type + # {::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferRunsRequest} or an equivalent Hash. + # + # @param request [::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferRunsRequest, ::Hash] + # A request object representing the call parameters. Required. To specify no + # parameters, or to keep all the default parameter values, pass an empty Hash. + # @param options [::Gapic::CallOptions, ::Hash] + # Overrides the default settings for this call, e.g, timeout, retries, etc. Optional. + # + # @overload list_transfer_runs(parent: nil, states: nil, page_token: nil, page_size: nil, run_attempt: nil) + # Pass arguments to `list_transfer_runs` via keyword arguments. Note that at + # least one keyword argument is required. To specify no parameters, or to keep all + # the default parameter values, pass an empty Hash as a request object (see above). + # + # @param parent [::String] + # Required. Name of transfer configuration for which transfer runs should be + # retrieved. Format of transfer configuration resource name is: + # `projects/{project_id}/transferConfigs/{config_id}` or + # `projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}`. + # @param states [::Array<::Google::Cloud::Bigquery::DataTransfer::V1::TransferState>] + # When specified, only transfer runs with requested states are returned. + # @param page_token [::String] + # Pagination token, which can be used to request a specific page + # of `ListTransferRunsRequest` list results. For multiple-page + # results, `ListTransferRunsResponse` outputs + # a `next_page` token, which can be used as the + # `page_token` value to request the next page of list results. + # @param page_size [::Integer] + # Page size. The default page size is the maximum value of 1000 results. + # @param run_attempt [::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferRunsRequest::RunAttempt] + # Indicates how run attempts are to be pulled. + # + # @yield [response, operation] Access the result along with the RPC operation + # @yieldparam response [::Gapic::PagedEnumerable<::Google::Cloud::Bigquery::DataTransfer::V1::TransferRun>] + # @yieldparam operation [::GRPC::ActiveCall::Operation] + # + # @return [::Gapic::PagedEnumerable<::Google::Cloud::Bigquery::DataTransfer::V1::TransferRun>] + # + # @raise [::Google::Cloud::Error] if the RPC is aborted. + # + # @example Basic example + # require "google/cloud/bigquery/data_transfer/v1" + # + # # Create a client object. The client can be reused for multiple calls. + # client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new + # + # # Create a request. To set request fields, pass in keyword arguments. + # request = Google::Cloud::Bigquery::DataTransfer::V1::ListTransferRunsRequest.new + # + # # Call the list_transfer_runs method. + # result = client.list_transfer_runs request + # + # # The returned object is of type Gapic::PagedEnumerable. You can iterate + # # over elements, and API calls will be issued to fetch pages as needed. + # result.each do |item| + # # Each element is of type ::Google::Cloud::Bigquery::DataTransfer::V1::TransferRun. + # p item + # end + # + def list_transfer_runs request, options = nil + raise ::ArgumentError, "request must be provided" if request.nil? 
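+
+ # Accepts either a request object or an equivalent Hash; the coercion
+ # below normalizes both forms into a ListTransferRunsRequest.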
+ + request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferRunsRequest + + # Converts hash and nil to an options object + options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h + + # Customize the options with defaults + metadata = @config.rpcs.list_transfer_runs.metadata.to_h + + # Set x-goog-api-client, x-goog-user-project and x-goog-api-version headers + metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \ + lib_name: @config.lib_name, lib_version: @config.lib_version, + gapic_version: ::Google::Cloud::Bigquery::DataTransfer::V1::VERSION + metadata[:"x-goog-api-version"] = API_VERSION unless API_VERSION.empty? + metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id + + header_params = {} + if request.parent + header_params["parent"] = request.parent + end + + request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&") + metadata[:"x-goog-request-params"] ||= request_params_header + + options.apply_defaults timeout: @config.rpcs.list_transfer_runs.timeout, + metadata: metadata, + retry_policy: @config.rpcs.list_transfer_runs.retry_policy + + options.apply_defaults timeout: @config.timeout, + metadata: @config.metadata, + retry_policy: @config.retry_policy + + @data_transfer_service_stub.call_rpc :list_transfer_runs, request, options: options do |response, operation| + response = ::Gapic::PagedEnumerable.new @data_transfer_service_stub, :list_transfer_runs, request, response, operation, options + yield response, operation if block_given? + return response + end + rescue ::GRPC::BadStatus => e + raise ::Google::Cloud::Error.from_error(e) + end + + ## + # Returns log messages for the transfer run. + # + # @overload list_transfer_logs(request, options = nil) + # Pass arguments to `list_transfer_logs` via a request object, either of type + # {::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferLogsRequest} or an equivalent Hash. + # + # @param request [::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferLogsRequest, ::Hash] + # A request object representing the call parameters. Required. To specify no + # parameters, or to keep all the default parameter values, pass an empty Hash. + # @param options [::Gapic::CallOptions, ::Hash] + # Overrides the default settings for this call, e.g, timeout, retries, etc. Optional. + # + # @overload list_transfer_logs(parent: nil, page_token: nil, page_size: nil, message_types: nil) + # Pass arguments to `list_transfer_logs` via keyword arguments. Note that at + # least one keyword argument is required. To specify no parameters, or to keep all + # the default parameter values, pass an empty Hash as a request object (see above). + # + # @param parent [::String] + # Required. Transfer run name in the form: + # `projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}` or + # `projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}/runs/{run_id}` + # @param page_token [::String] + # Pagination token, which can be used to request a specific page + # of `ListTransferLogsRequest` list results. For multiple-page + # results, `ListTransferLogsResponse` outputs + # a `next_page` token, which can be used as the + # `page_token` value to request the next page of list results. + # @param page_size [::Integer] + # Page size. The default page size is the maximum value of 1000 results. 
+ # @param message_types [::Array<::Google::Cloud::Bigquery::DataTransfer::V1::TransferMessage::MessageSeverity>] + # Message types to return. If not populated - INFO, WARNING and ERROR + # messages are returned. + # + # @yield [response, operation] Access the result along with the RPC operation + # @yieldparam response [::Gapic::PagedEnumerable<::Google::Cloud::Bigquery::DataTransfer::V1::TransferMessage>] + # @yieldparam operation [::GRPC::ActiveCall::Operation] + # + # @return [::Gapic::PagedEnumerable<::Google::Cloud::Bigquery::DataTransfer::V1::TransferMessage>] + # + # @raise [::Google::Cloud::Error] if the RPC is aborted. + # + # @example Basic example + # require "google/cloud/bigquery/data_transfer/v1" + # + # # Create a client object. The client can be reused for multiple calls. + # client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new + # + # # Create a request. To set request fields, pass in keyword arguments. + # request = Google::Cloud::Bigquery::DataTransfer::V1::ListTransferLogsRequest.new + # + # # Call the list_transfer_logs method. + # result = client.list_transfer_logs request + # + # # The returned object is of type Gapic::PagedEnumerable. You can iterate + # # over elements, and API calls will be issued to fetch pages as needed. + # result.each do |item| + # # Each element is of type ::Google::Cloud::Bigquery::DataTransfer::V1::TransferMessage. + # p item + # end + # + def list_transfer_logs request, options = nil + raise ::ArgumentError, "request must be provided" if request.nil? + + request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferLogsRequest + + # Converts hash and nil to an options object + options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h + + # Customize the options with defaults + metadata = @config.rpcs.list_transfer_logs.metadata.to_h + + # Set x-goog-api-client, x-goog-user-project and x-goog-api-version headers + metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \ + lib_name: @config.lib_name, lib_version: @config.lib_version, + gapic_version: ::Google::Cloud::Bigquery::DataTransfer::V1::VERSION + metadata[:"x-goog-api-version"] = API_VERSION unless API_VERSION.empty? + metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id + + header_params = {} + if request.parent + header_params["parent"] = request.parent + end + + request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&") + metadata[:"x-goog-request-params"] ||= request_params_header + + options.apply_defaults timeout: @config.rpcs.list_transfer_logs.timeout, + metadata: metadata, + retry_policy: @config.rpcs.list_transfer_logs.retry_policy + + options.apply_defaults timeout: @config.timeout, + metadata: @config.metadata, + retry_policy: @config.retry_policy + + @data_transfer_service_stub.call_rpc :list_transfer_logs, request, options: options do |response, operation| + response = ::Gapic::PagedEnumerable.new @data_transfer_service_stub, :list_transfer_logs, request, response, operation, options + yield response, operation if block_given? + return response + end + rescue ::GRPC::BadStatus => e + raise ::Google::Cloud::Error.from_error(e) + end + + ## + # Returns true if valid credentials exist for the given data source and + # requesting user. 
+ # + # @overload check_valid_creds(request, options = nil) + # Pass arguments to `check_valid_creds` via a request object, either of type + # {::Google::Cloud::Bigquery::DataTransfer::V1::CheckValidCredsRequest} or an equivalent Hash. + # + # @param request [::Google::Cloud::Bigquery::DataTransfer::V1::CheckValidCredsRequest, ::Hash] + # A request object representing the call parameters. Required. To specify no + # parameters, or to keep all the default parameter values, pass an empty Hash. + # @param options [::Gapic::CallOptions, ::Hash] + # Overrides the default settings for this call, e.g, timeout, retries, etc. Optional. + # + # @overload check_valid_creds(name: nil) + # Pass arguments to `check_valid_creds` via keyword arguments. Note that at + # least one keyword argument is required. To specify no parameters, or to keep all + # the default parameter values, pass an empty Hash as a request object (see above). + # + # @param name [::String] + # Required. The data source in the form: + # `projects/{project_id}/dataSources/{data_source_id}` or + # `projects/{project_id}/locations/{location_id}/dataSources/{data_source_id}`. + # + # @yield [response, operation] Access the result along with the RPC operation + # @yieldparam response [::Google::Cloud::Bigquery::DataTransfer::V1::CheckValidCredsResponse] + # @yieldparam operation [::GRPC::ActiveCall::Operation] + # + # @return [::Google::Cloud::Bigquery::DataTransfer::V1::CheckValidCredsResponse] + # + # @raise [::Google::Cloud::Error] if the RPC is aborted. + # + # @example Basic example + # require "google/cloud/bigquery/data_transfer/v1" + # + # # Create a client object. The client can be reused for multiple calls. + # client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new + # + # # Create a request. To set request fields, pass in keyword arguments. + # request = Google::Cloud::Bigquery::DataTransfer::V1::CheckValidCredsRequest.new + # + # # Call the check_valid_creds method. + # result = client.check_valid_creds request + # + # # The returned object is of type Google::Cloud::Bigquery::DataTransfer::V1::CheckValidCredsResponse. + # p result + # + def check_valid_creds request, options = nil + raise ::ArgumentError, "request must be provided" if request.nil? + + request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigquery::DataTransfer::V1::CheckValidCredsRequest + + # Converts hash and nil to an options object + options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h + + # Customize the options with defaults + metadata = @config.rpcs.check_valid_creds.metadata.to_h + + # Set x-goog-api-client, x-goog-user-project and x-goog-api-version headers + metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \ + lib_name: @config.lib_name, lib_version: @config.lib_version, + gapic_version: ::Google::Cloud::Bigquery::DataTransfer::V1::VERSION + metadata[:"x-goog-api-version"] = API_VERSION unless API_VERSION.empty? 
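+ # When a quota project is configured, the x-goog-user-project header
+ # below charges quota and billing to that project.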
+ metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id + + header_params = {} + if request.name + header_params["name"] = request.name + end + + request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&") + metadata[:"x-goog-request-params"] ||= request_params_header + + options.apply_defaults timeout: @config.rpcs.check_valid_creds.timeout, + metadata: metadata, + retry_policy: @config.rpcs.check_valid_creds.retry_policy + + options.apply_defaults timeout: @config.timeout, + metadata: @config.metadata, + retry_policy: @config.retry_policy + + @data_transfer_service_stub.call_rpc :check_valid_creds, request, options: options do |response, operation| + yield response, operation if block_given? + return response + end + rescue ::GRPC::BadStatus => e + raise ::Google::Cloud::Error.from_error(e) + end + + ## + # Enroll data sources in a user project. This allows users to create transfer + # configurations for these data sources. They will also appear in the + # ListDataSources RPC and as such, will appear in the + # [BigQuery UI](https://console.cloud.google.com/bigquery), and the documents + # can be found in the public guide for + # [BigQuery Web UI](https://cloud.google.com/bigquery/bigquery-web-ui) and + # [Data Transfer + # Service](https://cloud.google.com/bigquery/docs/working-with-transfers). + # + # @overload enroll_data_sources(request, options = nil) + # Pass arguments to `enroll_data_sources` via a request object, either of type + # {::Google::Cloud::Bigquery::DataTransfer::V1::EnrollDataSourcesRequest} or an equivalent Hash. + # + # @param request [::Google::Cloud::Bigquery::DataTransfer::V1::EnrollDataSourcesRequest, ::Hash] + # A request object representing the call parameters. Required. To specify no + # parameters, or to keep all the default parameter values, pass an empty Hash. + # @param options [::Gapic::CallOptions, ::Hash] + # Overrides the default settings for this call, e.g, timeout, retries, etc. Optional. + # + # @overload enroll_data_sources(name: nil, data_source_ids: nil) + # Pass arguments to `enroll_data_sources` via keyword arguments. Note that at + # least one keyword argument is required. To specify no parameters, or to keep all + # the default parameter values, pass an empty Hash as a request object (see above). + # + # @param name [::String] + # Required. The name of the project resource in the form: + # `projects/{project_id}` + # @param data_source_ids [::Array<::String>] + # Data sources that are enrolled. It is required to provide at least one + # data source id. + # + # @yield [response, operation] Access the result along with the RPC operation + # @yieldparam response [::Google::Protobuf::Empty] + # @yieldparam operation [::GRPC::ActiveCall::Operation] + # + # @return [::Google::Protobuf::Empty] + # + # @raise [::Google::Cloud::Error] if the RPC is aborted. + # + # @example Basic example + # require "google/cloud/bigquery/data_transfer/v1" + # + # # Create a client object. The client can be reused for multiple calls. + # client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new + # + # # Create a request. To set request fields, pass in keyword arguments. + # request = Google::Cloud::Bigquery::DataTransfer::V1::EnrollDataSourcesRequest.new + # + # # Call the enroll_data_sources method. + # result = client.enroll_data_sources request + # + # # The returned object is of type Google::Protobuf::Empty. 
+ # p result
+ #
+ def enroll_data_sources request, options = nil
+ raise ::ArgumentError, "request must be provided" if request.nil?
+
+ request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigquery::DataTransfer::V1::EnrollDataSourcesRequest
+
+ # Converts hash and nil to an options object
+ options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h
+
+ # Customize the options with defaults
+ metadata = @config.rpcs.enroll_data_sources.metadata.to_h
+
+ # Set x-goog-api-client, x-goog-user-project and x-goog-api-version headers
+ metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
+ lib_name: @config.lib_name, lib_version: @config.lib_version,
+ gapic_version: ::Google::Cloud::Bigquery::DataTransfer::V1::VERSION
+ metadata[:"x-goog-api-version"] = API_VERSION unless API_VERSION.empty?
+ metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id
+
+ header_params = {}
+ if request.name
+ header_params["name"] = request.name
+ end
+
+ request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&")
+ metadata[:"x-goog-request-params"] ||= request_params_header
+
+ options.apply_defaults timeout: @config.rpcs.enroll_data_sources.timeout,
+ metadata: metadata,
+ retry_policy: @config.rpcs.enroll_data_sources.retry_policy
+
+ options.apply_defaults timeout: @config.timeout,
+ metadata: @config.metadata,
+ retry_policy: @config.retry_policy
+
+ @data_transfer_service_stub.call_rpc :enroll_data_sources, request, options: options do |response, operation|
+ yield response, operation if block_given?
+ return response
+ end
+ rescue ::GRPC::BadStatus => e
+ raise ::Google::Cloud::Error.from_error(e)
+ end
+
+ ##
+ # Unenroll data sources in a user project. This allows users to remove
+ # transfer configurations for these data sources. They will no longer appear
+ # in the ListDataSources RPC and will also no longer appear in the [BigQuery
+ # UI](https://console.cloud.google.com/bigquery). Data transfer
+ # configurations of unenrolled data sources will not be scheduled.
+ #
+ # @overload unenroll_data_sources(request, options = nil)
+ # Pass arguments to `unenroll_data_sources` via a request object, either of type
+ # {::Google::Cloud::Bigquery::DataTransfer::V1::UnenrollDataSourcesRequest} or an equivalent Hash.
+ #
+ # @param request [::Google::Cloud::Bigquery::DataTransfer::V1::UnenrollDataSourcesRequest, ::Hash]
+ # A request object representing the call parameters. Required. To specify no
+ # parameters, or to keep all the default parameter values, pass an empty Hash.
+ # @param options [::Gapic::CallOptions, ::Hash]
+ # Overrides the default settings for this call, e.g., timeout, retries, etc. Optional.
+ #
+ # @overload unenroll_data_sources(name: nil, data_source_ids: nil)
+ # Pass arguments to `unenroll_data_sources` via keyword arguments. Note that at
+ # least one keyword argument is required. To specify no parameters, or to keep all
+ # the default parameter values, pass an empty Hash as a request object (see above).
+ #
+ # @param name [::String]
+ # Required. The name of the project resource in the form:
+ # `projects/{project_id}`
+ # @param data_source_ids [::Array<::String>]
+ # Data sources that are unenrolled. It is required to provide at least one
+ # data source id.
+ # + # @yield [response, operation] Access the result along with the RPC operation + # @yieldparam response [::Google::Protobuf::Empty] + # @yieldparam operation [::GRPC::ActiveCall::Operation] + # + # @return [::Google::Protobuf::Empty] + # + # @raise [::Google::Cloud::Error] if the RPC is aborted. + # + # @example Basic example + # require "google/cloud/bigquery/data_transfer/v1" + # + # # Create a client object. The client can be reused for multiple calls. + # client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new + # + # # Create a request. To set request fields, pass in keyword arguments. + # request = Google::Cloud::Bigquery::DataTransfer::V1::UnenrollDataSourcesRequest.new + # + # # Call the unenroll_data_sources method. + # result = client.unenroll_data_sources request + # + # # The returned object is of type Google::Protobuf::Empty. + # p result + # + def unenroll_data_sources request, options = nil + raise ::ArgumentError, "request must be provided" if request.nil? + + request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigquery::DataTransfer::V1::UnenrollDataSourcesRequest + + # Converts hash and nil to an options object + options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h + + # Customize the options with defaults + metadata = @config.rpcs.unenroll_data_sources.metadata.to_h + + # Set x-goog-api-client, x-goog-user-project and x-goog-api-version headers + metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \ + lib_name: @config.lib_name, lib_version: @config.lib_version, + gapic_version: ::Google::Cloud::Bigquery::DataTransfer::V1::VERSION + metadata[:"x-goog-api-version"] = API_VERSION unless API_VERSION.empty? + metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id + + header_params = {} + if request.name + header_params["name"] = request.name + end + + request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&") + metadata[:"x-goog-request-params"] ||= request_params_header + + options.apply_defaults timeout: @config.rpcs.unenroll_data_sources.timeout, + metadata: metadata, + retry_policy: @config.rpcs.unenroll_data_sources.retry_policy + + options.apply_defaults timeout: @config.timeout, + metadata: @config.metadata, + retry_policy: @config.retry_policy + + @data_transfer_service_stub.call_rpc :unenroll_data_sources, request, options: options do |response, operation| + yield response, operation if block_given? + return response + end + rescue ::GRPC::BadStatus => e + raise ::Google::Cloud::Error.from_error(e) + end + + ## + # Configuration class for the DataTransferService API. + # + # This class represents the configuration for DataTransferService, + # providing control over timeouts, retry behavior, logging, transport + # parameters, and other low-level controls. Certain parameters can also be + # applied individually to specific RPCs. See + # {::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client::Configuration::Rpcs} + # for a list of RPCs that can be configured independently. + # + # Configuration can be applied globally to all clients, or to a single client + # on construction. + # + # @example + # + # # Modify the global config, setting the timeout for + # # get_data_source to 20 seconds, + # # and all remaining timeouts to 10 seconds. 
+ # ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.configure do |config|
+ # config.timeout = 10.0
+ # config.rpcs.get_data_source.timeout = 20.0
+ # end
+ #
+ # # Apply the above configuration only to a new client.
+ # client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new do |config|
+ # config.timeout = 10.0
+ # config.rpcs.get_data_source.timeout = 20.0
+ # end
+ #
+ # @!attribute [rw] endpoint
+ # A custom service endpoint, as a hostname or hostname:port. The default is
+ # nil, indicating to use the default endpoint in the current universe domain.
+ # @return [::String,nil]
+ # @!attribute [rw] credentials
+ # Credentials to send with calls. You may provide any of the following types:
+ # * (`String`) The path to a service account key file in JSON format
+ # * (`Hash`) A service account key as a Hash
+ # * (`Google::Auth::Credentials`) A googleauth credentials object
+ # (see the [googleauth docs](https://rubydoc.info/gems/googleauth/Google/Auth/Credentials))
+ # * (`Signet::OAuth2::Client`) A signet oauth2 client object
+ # (see the [signet docs](https://rubydoc.info/gems/signet/Signet/OAuth2/Client))
+ # * (`GRPC::Core::Channel`) a gRPC channel with included credentials
+ # * (`GRPC::Core::ChannelCredentials`) a gRPC credentials object
+ # * (`nil`) indicating no credentials
+ # @return [::Object]
+ # @!attribute [rw] scope
+ # The OAuth scopes
+ # @return [::Array<::String>]
+ # @!attribute [rw] lib_name
+ # The library name as recorded in instrumentation and logging
+ # @return [::String]
+ # @!attribute [rw] lib_version
+ # The library version as recorded in instrumentation and logging
+ # @return [::String]
+ # @!attribute [rw] channel_args
+ # Extra parameters passed to the gRPC channel. Note: this is ignored if a
+ # `GRPC::Core::Channel` object is provided as the credential.
+ # @return [::Hash]
+ # @!attribute [rw] interceptors
+ # An array of interceptors that are run before calls are executed.
+ # @return [::Array<::GRPC::ClientInterceptor>]
+ # @!attribute [rw] timeout
+ # The call timeout in seconds.
+ # @return [::Numeric]
+ # @!attribute [rw] metadata
+ # Additional gRPC headers to be sent with the call.
+ # @return [::Hash{::Symbol=>::String}]
+ # @!attribute [rw] retry_policy
+ # The retry policy. The value is a hash with the following keys:
+ # * `:initial_delay` (*type:* `Numeric`) - The initial delay in seconds.
+ # * `:max_delay` (*type:* `Numeric`) - The max delay in seconds.
+ # * `:multiplier` (*type:* `Numeric`) - The incremental backoff multiplier.
+ # * `:retry_codes` (*type:* `Array`) - The error codes that should
+ # trigger a retry.
+ # @return [::Hash]
+ # @!attribute [rw] quota_project
+ # A separate project against which to charge quota.
+ # @return [::String]
+ # @!attribute [rw] universe_domain
+ # The universe domain within which to make requests. This determines the
+ # default endpoint URL. The default value of nil uses the environment
+ # universe (usually the default "googleapis.com" universe).
+ # @return [::String,nil]
+ #
+ class Configuration
+ extend ::Gapic::Config
+
+ # @private
+ # The endpoint specific to the default "googleapis.com" universe. Deprecated.
+ DEFAULT_ENDPOINT = "bigquerydatatransfer.googleapis.com"
+
+ config_attr :endpoint, nil, ::String, nil
+ config_attr :credentials, nil do |value|
+ allowed = [::String, ::Hash, ::Proc, ::Symbol, ::Google::Auth::Credentials, ::Signet::OAuth2::Client, nil]
+ allowed += [::GRPC::Core::Channel, ::GRPC::Core::ChannelCredentials] if defined? ::GRPC
+ allowed.any? { |klass| klass === value }
+ end
+ config_attr :scope, nil, ::String, ::Array, nil
+ config_attr :lib_name, nil, ::String, nil
+ config_attr :lib_version, nil, ::String, nil
+ config_attr(:channel_args, { "grpc.service_config_disable_resolution" => 1 }, ::Hash, nil)
+ config_attr :interceptors, nil, ::Array, nil
+ config_attr :timeout, nil, ::Numeric, nil
+ config_attr :metadata, nil, ::Hash, nil
+ config_attr :retry_policy, nil, ::Hash, ::Proc, nil
+ config_attr :quota_project, nil, ::String, nil
+ config_attr :universe_domain, nil, ::String, nil
+
+ # @private
+ def initialize parent_config = nil
+ @parent_config = parent_config unless parent_config.nil?
+
+ yield self if block_given?
+ end
+
+ ##
+ # Configurations for individual RPCs
+ # @return [Rpcs]
+ #
+ def rpcs
+ @rpcs ||= begin
+ parent_rpcs = nil
+ parent_rpcs = @parent_config.rpcs if defined?(@parent_config) && @parent_config.respond_to?(:rpcs)
+ Rpcs.new parent_rpcs
+ end
+ end
+
+ ##
+ # Configuration for the channel pool
+ # @return [::Gapic::ServiceStub::ChannelPool::Configuration]
+ #
+ def channel_pool
+ @channel_pool ||= ::Gapic::ServiceStub::ChannelPool::Configuration.new
+ end
+
+ ##
+ # Configuration RPC class for the DataTransferService API.
+ #
+ # Includes fields providing the configuration for each RPC in this service.
+ # Each configuration object is of type `Gapic::Config::Method` and includes
+ # the following configuration fields:
+ #
+ # * `timeout` (*type:* `Numeric`) - The call timeout in seconds
+ # * `metadata` (*type:* `Hash{Symbol=>String}`) - Additional gRPC headers
+ # * `retry_policy` (*type:* `Hash`) - The retry policy. The policy fields
+ # include the following keys:
+ # * `:initial_delay` (*type:* `Numeric`) - The initial delay in seconds.
+ # * `:max_delay` (*type:* `Numeric`) - The max delay in seconds.
+ # * `:multiplier` (*type:* `Numeric`) - The incremental backoff multiplier.
+ # * `:retry_codes` (*type:* `Array`) - The error codes that should
+ # trigger a retry.
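+ #
+ # A short sketch of overriding one RPC's retry policy with these keys
+ # (the values shown are illustrative only):
+ #
+ #   client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new do |config|
+ #     config.rpcs.list_transfer_runs.retry_policy = {
+ #       initial_delay: 0.2, max_delay: 30.0, multiplier: 1.5, retry_codes: [4, 14]
+ #     }
+ #   end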
+ # + class Rpcs + ## + # RPC-specific configuration for `get_data_source` + # @return [::Gapic::Config::Method] + # + attr_reader :get_data_source + ## + # RPC-specific configuration for `list_data_sources` + # @return [::Gapic::Config::Method] + # + attr_reader :list_data_sources + ## + # RPC-specific configuration for `create_transfer_config` + # @return [::Gapic::Config::Method] + # + attr_reader :create_transfer_config + ## + # RPC-specific configuration for `update_transfer_config` + # @return [::Gapic::Config::Method] + # + attr_reader :update_transfer_config + ## + # RPC-specific configuration for `delete_transfer_config` + # @return [::Gapic::Config::Method] + # + attr_reader :delete_transfer_config + ## + # RPC-specific configuration for `get_transfer_config` + # @return [::Gapic::Config::Method] + # + attr_reader :get_transfer_config + ## + # RPC-specific configuration for `list_transfer_configs` + # @return [::Gapic::Config::Method] + # + attr_reader :list_transfer_configs + ## + # RPC-specific configuration for `schedule_transfer_runs` + # @return [::Gapic::Config::Method] + # + attr_reader :schedule_transfer_runs + ## + # RPC-specific configuration for `start_manual_transfer_runs` + # @return [::Gapic::Config::Method] + # + attr_reader :start_manual_transfer_runs + ## + # RPC-specific configuration for `get_transfer_run` + # @return [::Gapic::Config::Method] + # + attr_reader :get_transfer_run + ## + # RPC-specific configuration for `delete_transfer_run` + # @return [::Gapic::Config::Method] + # + attr_reader :delete_transfer_run + ## + # RPC-specific configuration for `list_transfer_runs` + # @return [::Gapic::Config::Method] + # + attr_reader :list_transfer_runs + ## + # RPC-specific configuration for `list_transfer_logs` + # @return [::Gapic::Config::Method] + # + attr_reader :list_transfer_logs + ## + # RPC-specific configuration for `check_valid_creds` + # @return [::Gapic::Config::Method] + # + attr_reader :check_valid_creds + ## + # RPC-specific configuration for `enroll_data_sources` + # @return [::Gapic::Config::Method] + # + attr_reader :enroll_data_sources + ## + # RPC-specific configuration for `unenroll_data_sources` + # @return [::Gapic::Config::Method] + # + attr_reader :unenroll_data_sources + + # @private + def initialize parent_rpcs = nil + get_data_source_config = parent_rpcs.get_data_source if parent_rpcs.respond_to? :get_data_source + @get_data_source = ::Gapic::Config::Method.new get_data_source_config + list_data_sources_config = parent_rpcs.list_data_sources if parent_rpcs.respond_to? :list_data_sources + @list_data_sources = ::Gapic::Config::Method.new list_data_sources_config + create_transfer_config_config = parent_rpcs.create_transfer_config if parent_rpcs.respond_to? :create_transfer_config + @create_transfer_config = ::Gapic::Config::Method.new create_transfer_config_config + update_transfer_config_config = parent_rpcs.update_transfer_config if parent_rpcs.respond_to? :update_transfer_config + @update_transfer_config = ::Gapic::Config::Method.new update_transfer_config_config + delete_transfer_config_config = parent_rpcs.delete_transfer_config if parent_rpcs.respond_to? :delete_transfer_config + @delete_transfer_config = ::Gapic::Config::Method.new delete_transfer_config_config + get_transfer_config_config = parent_rpcs.get_transfer_config if parent_rpcs.respond_to? 
:get_transfer_config + @get_transfer_config = ::Gapic::Config::Method.new get_transfer_config_config + list_transfer_configs_config = parent_rpcs.list_transfer_configs if parent_rpcs.respond_to? :list_transfer_configs + @list_transfer_configs = ::Gapic::Config::Method.new list_transfer_configs_config + schedule_transfer_runs_config = parent_rpcs.schedule_transfer_runs if parent_rpcs.respond_to? :schedule_transfer_runs + @schedule_transfer_runs = ::Gapic::Config::Method.new schedule_transfer_runs_config + start_manual_transfer_runs_config = parent_rpcs.start_manual_transfer_runs if parent_rpcs.respond_to? :start_manual_transfer_runs + @start_manual_transfer_runs = ::Gapic::Config::Method.new start_manual_transfer_runs_config + get_transfer_run_config = parent_rpcs.get_transfer_run if parent_rpcs.respond_to? :get_transfer_run + @get_transfer_run = ::Gapic::Config::Method.new get_transfer_run_config + delete_transfer_run_config = parent_rpcs.delete_transfer_run if parent_rpcs.respond_to? :delete_transfer_run + @delete_transfer_run = ::Gapic::Config::Method.new delete_transfer_run_config + list_transfer_runs_config = parent_rpcs.list_transfer_runs if parent_rpcs.respond_to? :list_transfer_runs + @list_transfer_runs = ::Gapic::Config::Method.new list_transfer_runs_config + list_transfer_logs_config = parent_rpcs.list_transfer_logs if parent_rpcs.respond_to? :list_transfer_logs + @list_transfer_logs = ::Gapic::Config::Method.new list_transfer_logs_config + check_valid_creds_config = parent_rpcs.check_valid_creds if parent_rpcs.respond_to? :check_valid_creds + @check_valid_creds = ::Gapic::Config::Method.new check_valid_creds_config + enroll_data_sources_config = parent_rpcs.enroll_data_sources if parent_rpcs.respond_to? :enroll_data_sources + @enroll_data_sources = ::Gapic::Config::Method.new enroll_data_sources_config + unenroll_data_sources_config = parent_rpcs.unenroll_data_sources if parent_rpcs.respond_to? :unenroll_data_sources + @unenroll_data_sources = ::Gapic::Config::Method.new unenroll_data_sources_config + + yield self if block_given? + end + end + end + end + end + end + end + end + end +end diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/data_transfer/v1/data_transfer_service/credentials.rb b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/data_transfer/v1/data_transfer_service/credentials.rb new file mode 100644 index 000000000000..4fe857bc5f97 --- /dev/null +++ b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/data_transfer/v1/data_transfer_service/credentials.rb @@ -0,0 +1,53 @@ +# frozen_string_literal: true + +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Auto-generated by gapic-generator-ruby. DO NOT EDIT! + +require "googleauth" + +module Google + module Cloud + module Bigquery + module DataTransfer + module V1 + module DataTransferService + # Credentials for the DataTransferService API. 
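+ # Looks for credentials in the environment variables and default keyfile
+ # paths listed below, in addition to any explicitly supplied credentials.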
+ class Credentials < ::Google::Auth::Credentials + self.scope = [ + "https://www.googleapis.com/auth/cloud-platform" + ] + self.env_vars = [ + "DATA_TRANSFER_CREDENTIALS", + "DATA_TRANSFER_KEYFILE", + "GOOGLE_CLOUD_CREDENTIALS", + "GOOGLE_CLOUD_KEYFILE", + "GCLOUD_KEYFILE", + "DATA_TRANSFER_CREDENTIALS_JSON", + "DATA_TRANSFER_KEYFILE_JSON", + "GOOGLE_CLOUD_CREDENTIALS_JSON", + "GOOGLE_CLOUD_KEYFILE_JSON", + "GCLOUD_KEYFILE_JSON" + ] + self.paths = [ + "~/.config/google_cloud/application_default_credentials.json" + ] + end + end + end + end + end + end +end diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/data_transfer/v1/data_transfer_service/paths.rb b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/data_transfer/v1/data_transfer_service/paths.rb new file mode 100644 index 000000000000..3083e79e4b05 --- /dev/null +++ b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/data_transfer/v1/data_transfer_service/paths.rb @@ -0,0 +1,193 @@ +# frozen_string_literal: true + +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Auto-generated by gapic-generator-ruby. DO NOT EDIT! + + +module Google + module Cloud + module Bigquery + module DataTransfer + module V1 + module DataTransferService + # Path helper methods for the DataTransferService API. + module Paths + ## + # Create a fully-qualified DataSource resource string. + # + # @overload data_source_path(project:, data_source:) + # The resource will be in the following format: + # + # `projects/{project}/dataSources/{data_source}` + # + # @param project [String] + # @param data_source [String] + # + # @overload data_source_path(project:, location:, data_source:) + # The resource will be in the following format: + # + # `projects/{project}/locations/{location}/dataSources/{data_source}` + # + # @param project [String] + # @param location [String] + # @param data_source [String] + # + # @return [::String] + def data_source_path **args + resources = { + "data_source:project" => (proc do |project:, data_source:| + raise ::ArgumentError, "project cannot contain /" if project.to_s.include? "/" + + "projects/#{project}/dataSources/#{data_source}" + end), + "data_source:location:project" => (proc do |project:, location:, data_source:| + raise ::ArgumentError, "project cannot contain /" if project.to_s.include? "/" + raise ::ArgumentError, "location cannot contain /" if location.to_s.include? "/" + + "projects/#{project}/locations/#{location}/dataSources/#{data_source}" + end) + } + + resource = resources[args.keys.sort.join(":")] + raise ::ArgumentError, "no resource found for values #{args.keys}" if resource.nil? + resource.call(**args) + end + + ## + # Create a fully-qualified Location resource string. 
+ # + # The resource will be in the following format: + # + # `projects/{project}/locations/{location}` + # + # @param project [String] + # @param location [String] + # + # @return [::String] + def location_path project:, location: + raise ::ArgumentError, "project cannot contain /" if project.to_s.include? "/" + + "projects/#{project}/locations/#{location}" + end + + ## + # Create a fully-qualified Project resource string. + # + # The resource will be in the following format: + # + # `projects/{project}` + # + # @param project [String] + # + # @return [::String] + def project_path project: + "projects/#{project}" + end + + ## + # Create a fully-qualified Run resource string. + # + # @overload run_path(project:, transfer_config:, run:) + # The resource will be in the following format: + # + # `projects/{project}/transferConfigs/{transfer_config}/runs/{run}` + # + # @param project [String] + # @param transfer_config [String] + # @param run [String] + # + # @overload run_path(project:, location:, transfer_config:, run:) + # The resource will be in the following format: + # + # `projects/{project}/locations/{location}/transferConfigs/{transfer_config}/runs/{run}` + # + # @param project [String] + # @param location [String] + # @param transfer_config [String] + # @param run [String] + # + # @return [::String] + def run_path **args + resources = { + "project:run:transfer_config" => (proc do |project:, transfer_config:, run:| + raise ::ArgumentError, "project cannot contain /" if project.to_s.include? "/" + raise ::ArgumentError, "transfer_config cannot contain /" if transfer_config.to_s.include? "/" + + "projects/#{project}/transferConfigs/#{transfer_config}/runs/#{run}" + end), + "location:project:run:transfer_config" => (proc do |project:, location:, transfer_config:, run:| + raise ::ArgumentError, "project cannot contain /" if project.to_s.include? "/" + raise ::ArgumentError, "location cannot contain /" if location.to_s.include? "/" + raise ::ArgumentError, "transfer_config cannot contain /" if transfer_config.to_s.include? "/" + + "projects/#{project}/locations/#{location}/transferConfigs/#{transfer_config}/runs/#{run}" + end) + } + + resource = resources[args.keys.sort.join(":")] + raise ::ArgumentError, "no resource found for values #{args.keys}" if resource.nil? + resource.call(**args) + end + + ## + # Create a fully-qualified TransferConfig resource string. + # + # @overload transfer_config_path(project:, transfer_config:) + # The resource will be in the following format: + # + # `projects/{project}/transferConfigs/{transfer_config}` + # + # @param project [String] + # @param transfer_config [String] + # + # @overload transfer_config_path(project:, location:, transfer_config:) + # The resource will be in the following format: + # + # `projects/{project}/locations/{location}/transferConfigs/{transfer_config}` + # + # @param project [String] + # @param location [String] + # @param transfer_config [String] + # + # @return [::String] + def transfer_config_path **args + resources = { + "project:transfer_config" => (proc do |project:, transfer_config:| + raise ::ArgumentError, "project cannot contain /" if project.to_s.include? "/" + + "projects/#{project}/transferConfigs/#{transfer_config}" + end), + "location:project:transfer_config" => (proc do |project:, location:, transfer_config:| + raise ::ArgumentError, "project cannot contain /" if project.to_s.include? "/" + raise ::ArgumentError, "location cannot contain /" if location.to_s.include? 
"/" + + "projects/#{project}/locations/#{location}/transferConfigs/#{transfer_config}" + end) + } + + resource = resources[args.keys.sort.join(":")] + raise ::ArgumentError, "no resource found for values #{args.keys}" if resource.nil? + resource.call(**args) + end + + extend self + end + end + end + end + end + end +end diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/data_transfer/v1/data_transfer_service/rest.rb b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/data_transfer/v1/data_transfer_service/rest.rb new file mode 100644 index 000000000000..5a33667e70ce --- /dev/null +++ b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/data_transfer/v1/data_transfer_service/rest.rb @@ -0,0 +1,55 @@ +# frozen_string_literal: true + +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Auto-generated by gapic-generator-ruby. DO NOT EDIT! + +require "gapic/rest" +require "gapic/config" +require "gapic/config/method" + +require "google/cloud/bigquery/data_transfer/v1/version" +require "google/cloud/bigquery/data_transfer/v1/bindings_override" + +require "google/cloud/bigquery/data_transfer/v1/data_transfer_service/credentials" +require "google/cloud/bigquery/data_transfer/v1/data_transfer_service/paths" +require "google/cloud/bigquery/data_transfer/v1/data_transfer_service/rest/client" + +module Google + module Cloud + module Bigquery + module DataTransfer + module V1 + ## + # This API allows users to manage their data transfers into BigQuery. + # + # To load this service and instantiate a REST client: + # + # require "google/cloud/bigquery/data_transfer/v1/data_transfer_service/rest" + # client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::Client.new + # + module DataTransferService + # Client for the REST transport + module Rest + end + end + end + end + end + end +end + +helper_path = ::File.join __dir__, "rest", "helpers.rb" +require "google/cloud/bigquery/data_transfer/v1/data_transfer_service/rest/helpers" if ::File.file? helper_path diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/data_transfer/v1/data_transfer_service/rest/client.rb b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/data_transfer/v1/data_transfer_service/rest/client.rb new file mode 100644 index 000000000000..dd029392dfa6 --- /dev/null +++ b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/data_transfer/v1/data_transfer_service/rest/client.rb @@ -0,0 +1,1995 @@ +# frozen_string_literal: true + +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Auto-generated by gapic-generator-ruby. DO NOT EDIT! + +require "google/cloud/errors" +require "google/cloud/bigquery/datatransfer/v1/datatransfer_pb" +require "google/cloud/bigquery/data_transfer/v1/data_transfer_service/rest/service_stub" +require "google/cloud/location/rest" + +module Google + module Cloud + module Bigquery + module DataTransfer + module V1 + module DataTransferService + module Rest + ## + # REST client for the DataTransferService service. + # + # This API allows users to manage their data transfers into BigQuery. + # + class Client + # @private + API_VERSION = "" + + # @private + DEFAULT_ENDPOINT_TEMPLATE = "bigquerydatatransfer.$UNIVERSE_DOMAIN$" + + include Paths + + # @private + attr_reader :data_transfer_service_stub + + ## + # Configure the DataTransferService Client class. + # + # See {::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::Client::Configuration} + # for a description of the configuration fields. + # + # @example + # + # # Modify the configuration for all DataTransferService clients + # ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::Client.configure do |config| + # config.timeout = 10.0 + # end + # + # @yield [config] Configure the Client client. + # @yieldparam config [Client::Configuration] + # + # @return [Client::Configuration] + # + def self.configure + @configure ||= begin + namespace = ["Google", "Cloud", "Bigquery", "DataTransfer", "V1"] + parent_config = while namespace.any? + parent_name = namespace.join "::" + parent_const = const_get parent_name + break parent_const.configure if parent_const.respond_to? 
:configure + namespace.pop + end + default_config = Client::Configuration.new parent_config + + default_config.rpcs.get_data_source.timeout = 20.0 + default_config.rpcs.get_data_source.retry_policy = { + initial_delay: 0.1, max_delay: 60.0, multiplier: 1.3, retry_codes: [14, 4] + } + + default_config.rpcs.list_data_sources.timeout = 20.0 + default_config.rpcs.list_data_sources.retry_policy = { + initial_delay: 0.1, max_delay: 60.0, multiplier: 1.3, retry_codes: [14, 4] + } + + default_config.rpcs.create_transfer_config.timeout = 30.0 + + default_config.rpcs.update_transfer_config.timeout = 30.0 + + default_config.rpcs.delete_transfer_config.timeout = 20.0 + default_config.rpcs.delete_transfer_config.retry_policy = { + initial_delay: 0.1, max_delay: 60.0, multiplier: 1.3, retry_codes: [14, 4] + } + + default_config.rpcs.get_transfer_config.timeout = 20.0 + default_config.rpcs.get_transfer_config.retry_policy = { + initial_delay: 0.1, max_delay: 60.0, multiplier: 1.3, retry_codes: [14, 4] + } + + default_config.rpcs.list_transfer_configs.timeout = 20.0 + default_config.rpcs.list_transfer_configs.retry_policy = { + initial_delay: 0.1, max_delay: 60.0, multiplier: 1.3, retry_codes: [14, 4] + } + + default_config.rpcs.schedule_transfer_runs.timeout = 30.0 + + default_config.rpcs.get_transfer_run.timeout = 20.0 + default_config.rpcs.get_transfer_run.retry_policy = { + initial_delay: 0.1, max_delay: 60.0, multiplier: 1.3, retry_codes: [14, 4] + } + + default_config.rpcs.delete_transfer_run.timeout = 20.0 + default_config.rpcs.delete_transfer_run.retry_policy = { + initial_delay: 0.1, max_delay: 60.0, multiplier: 1.3, retry_codes: [14, 4] + } + + default_config.rpcs.list_transfer_runs.timeout = 20.0 + default_config.rpcs.list_transfer_runs.retry_policy = { + initial_delay: 0.1, max_delay: 60.0, multiplier: 1.3, retry_codes: [14, 4] + } + + default_config.rpcs.list_transfer_logs.timeout = 20.0 + default_config.rpcs.list_transfer_logs.retry_policy = { + initial_delay: 0.1, max_delay: 60.0, multiplier: 1.3, retry_codes: [14, 4] + } + + default_config.rpcs.check_valid_creds.timeout = 20.0 + default_config.rpcs.check_valid_creds.retry_policy = { + initial_delay: 0.1, max_delay: 60.0, multiplier: 1.3, retry_codes: [14, 4] + } + + default_config + end + yield @configure if block_given? + @configure + end + + ## + # Configure the DataTransferService Client instance. + # + # The configuration is set to the derived mode, meaning that values can be changed, + # but structural changes (adding new fields, etc.) are not allowed. Structural changes + # should be made on {Client.configure}. + # + # See {::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::Client::Configuration} + # for a description of the configuration fields. + # + # @yield [config] Configure the Client client. + # @yieldparam config [Client::Configuration] + # + # @return [Client::Configuration] + # + def configure + yield @config if block_given? + @config + end + + ## + # The effective universe domain + # + # @return [String] + # + def universe_domain + @data_transfer_service_stub.universe_domain + end + + ## + # Create a new DataTransferService REST client object. 
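+ #
+ # Credentials are taken from the configuration; when none are set, the
+ # client falls back to `Credentials.default` with the configured scope.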
+ # + # @example + # + # # Create a client using the default configuration + # client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::Client.new + # + # # Create a client using a custom configuration + # client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::Client.new do |config| + # config.timeout = 10.0 + # end + # + # @yield [config] Configure the DataTransferService client. + # @yieldparam config [Client::Configuration] + # + def initialize + # Create the configuration object + @config = Configuration.new Client.configure + + # Yield the configuration if needed + yield @config if block_given? + + # Create credentials + credentials = @config.credentials + # Use self-signed JWT if the endpoint is unchanged from default, + # but only if the default endpoint does not have a region prefix. + enable_self_signed_jwt = @config.endpoint.nil? || + (@config.endpoint == Configuration::DEFAULT_ENDPOINT && + !@config.endpoint.split(".").first.include?("-")) + credentials ||= Credentials.default scope: @config.scope, + enable_self_signed_jwt: enable_self_signed_jwt + if credentials.is_a?(::String) || credentials.is_a?(::Hash) + credentials = Credentials.new credentials, scope: @config.scope + end + + @quota_project_id = @config.quota_project + @quota_project_id ||= credentials.quota_project_id if credentials.respond_to? :quota_project_id + + @data_transfer_service_stub = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::ServiceStub.new( + endpoint: @config.endpoint, + endpoint_template: DEFAULT_ENDPOINT_TEMPLATE, + universe_domain: @config.universe_domain, + credentials: credentials + ) + + @location_client = Google::Cloud::Location::Locations::Rest::Client.new do |config| + config.credentials = credentials + config.quota_project = @quota_project_id + config.endpoint = @data_transfer_service_stub.endpoint + config.universe_domain = @data_transfer_service_stub.universe_domain + config.bindings_override = @config.bindings_override + end + end + + ## + # Get the associated client for mix-in of the Locations. + # + # @return [Google::Cloud::Location::Locations::Rest::Client] + # + attr_reader :location_client + + # Service calls + + ## + # Retrieves a supported data source and returns its settings. + # + # @overload get_data_source(request, options = nil) + # Pass arguments to `get_data_source` via a request object, either of type + # {::Google::Cloud::Bigquery::DataTransfer::V1::GetDataSourceRequest} or an equivalent Hash. + # + # @param request [::Google::Cloud::Bigquery::DataTransfer::V1::GetDataSourceRequest, ::Hash] + # A request object representing the call parameters. Required. To specify no + # parameters, or to keep all the default parameter values, pass an empty Hash. + # @param options [::Gapic::CallOptions, ::Hash] + # Overrides the default settings for this call, e.g, timeout, retries etc. Optional. + # + # @overload get_data_source(name: nil) + # Pass arguments to `get_data_source` via keyword arguments. Note that at + # least one keyword argument is required. To specify no parameters, or to keep all + # the default parameter values, pass an empty Hash as a request object (see above). + # + # @param name [::String] + # Required. 
The field will contain name of the resource requested, for + # example: `projects/{project_id}/dataSources/{data_source_id}` or + # `projects/{project_id}/locations/{location_id}/dataSources/{data_source_id}` + # @yield [result, operation] Access the result along with the TransportOperation object + # @yieldparam result [::Google::Cloud::Bigquery::DataTransfer::V1::DataSource] + # @yieldparam operation [::Gapic::Rest::TransportOperation] + # + # @return [::Google::Cloud::Bigquery::DataTransfer::V1::DataSource] + # + # @raise [::Google::Cloud::Error] if the REST call is aborted. + # + # @example Basic example + # require "google/cloud/bigquery/data_transfer/v1" + # + # # Create a client object. The client can be reused for multiple calls. + # client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::Client.new + # + # # Create a request. To set request fields, pass in keyword arguments. + # request = Google::Cloud::Bigquery::DataTransfer::V1::GetDataSourceRequest.new + # + # # Call the get_data_source method. + # result = client.get_data_source request + # + # # The returned object is of type Google::Cloud::Bigquery::DataTransfer::V1::DataSource. + # p result + # + def get_data_source request, options = nil + raise ::ArgumentError, "request must be provided" if request.nil? + + request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigquery::DataTransfer::V1::GetDataSourceRequest + + # Converts hash and nil to an options object + options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h + + # Customize the options with defaults + call_metadata = @config.rpcs.get_data_source.metadata.to_h + + # Set x-goog-api-client, x-goog-user-project and x-goog-api-version headers + call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \ + lib_name: @config.lib_name, lib_version: @config.lib_version, + gapic_version: ::Google::Cloud::Bigquery::DataTransfer::V1::VERSION, + transports_version_send: [:rest] + + call_metadata[:"x-goog-api-version"] = API_VERSION unless API_VERSION.empty? + call_metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id + + options.apply_defaults timeout: @config.rpcs.get_data_source.timeout, + metadata: call_metadata, + retry_policy: @config.rpcs.get_data_source.retry_policy + + options.apply_defaults timeout: @config.timeout, + metadata: @config.metadata, + retry_policy: @config.retry_policy + + @data_transfer_service_stub.get_data_source request, options do |result, operation| + yield result, operation if block_given? + return result + end + rescue ::Gapic::Rest::Error => e + raise ::Google::Cloud::Error.from_error(e) + end + + ## + # Lists supported data sources and returns their settings. + # + # @overload list_data_sources(request, options = nil) + # Pass arguments to `list_data_sources` via a request object, either of type + # {::Google::Cloud::Bigquery::DataTransfer::V1::ListDataSourcesRequest} or an equivalent Hash. + # + # @param request [::Google::Cloud::Bigquery::DataTransfer::V1::ListDataSourcesRequest, ::Hash] + # A request object representing the call parameters. Required. To specify no + # parameters, or to keep all the default parameter values, pass an empty Hash. + # @param options [::Gapic::CallOptions, ::Hash] + # Overrides the default settings for this call, e.g, timeout, retries etc. Optional. + # + # @overload list_data_sources(parent: nil, page_token: nil, page_size: nil) + # Pass arguments to `list_data_sources` via keyword arguments. 
Note that at + # least one keyword argument is required. To specify no parameters, or to keep all + # the default parameter values, pass an empty Hash as a request object (see above). + # + # @param parent [::String] + # Required. The BigQuery project id for which data sources should be + # returned. Must be in the form: `projects/{project_id}` or + # `projects/{project_id}/locations/{location_id}` + # @param page_token [::String] + # Pagination token, which can be used to request a specific page + # of `ListDataSourcesRequest` list results. For multiple-page + # results, `ListDataSourcesResponse` outputs + # a `next_page` token, which can be used as the + # `page_token` value to request the next page of list results. + # @param page_size [::Integer] + # Page size. The default page size is the maximum value of 1000 results. + # @yield [result, operation] Access the result along with the TransportOperation object + # @yieldparam result [::Gapic::Rest::PagedEnumerable<::Google::Cloud::Bigquery::DataTransfer::V1::DataSource>] + # @yieldparam operation [::Gapic::Rest::TransportOperation] + # + # @return [::Gapic::Rest::PagedEnumerable<::Google::Cloud::Bigquery::DataTransfer::V1::DataSource>] + # + # @raise [::Google::Cloud::Error] if the REST call is aborted. + # + # @example Basic example + # require "google/cloud/bigquery/data_transfer/v1" + # + # # Create a client object. The client can be reused for multiple calls. + # client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::Client.new + # + # # Create a request. To set request fields, pass in keyword arguments. + # request = Google::Cloud::Bigquery::DataTransfer::V1::ListDataSourcesRequest.new + # + # # Call the list_data_sources method. + # result = client.list_data_sources request + # + # # The returned object is of type Gapic::PagedEnumerable. You can iterate + # # over elements, and API calls will be issued to fetch pages as needed. + # result.each do |item| + # # Each element is of type ::Google::Cloud::Bigquery::DataTransfer::V1::DataSource. + # p item + # end + # + def list_data_sources request, options = nil + raise ::ArgumentError, "request must be provided" if request.nil? + + request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigquery::DataTransfer::V1::ListDataSourcesRequest + + # Converts hash and nil to an options object + options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h + + # Customize the options with defaults + call_metadata = @config.rpcs.list_data_sources.metadata.to_h + + # Set x-goog-api-client, x-goog-user-project and x-goog-api-version headers + call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \ + lib_name: @config.lib_name, lib_version: @config.lib_version, + gapic_version: ::Google::Cloud::Bigquery::DataTransfer::V1::VERSION, + transports_version_send: [:rest] + + call_metadata[:"x-goog-api-version"] = API_VERSION unless API_VERSION.empty? 
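+                # The x-goog-user-project header below attributes quota usage and billing
+                # for this call to the configured quota project, when one is available.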
+ call_metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id + + options.apply_defaults timeout: @config.rpcs.list_data_sources.timeout, + metadata: call_metadata, + retry_policy: @config.rpcs.list_data_sources.retry_policy + + options.apply_defaults timeout: @config.timeout, + metadata: @config.metadata, + retry_policy: @config.retry_policy + + @data_transfer_service_stub.list_data_sources request, options do |result, operation| + result = ::Gapic::Rest::PagedEnumerable.new @data_transfer_service_stub, :list_data_sources, "data_sources", request, result, options + yield result, operation if block_given? + return result + end + rescue ::Gapic::Rest::Error => e + raise ::Google::Cloud::Error.from_error(e) + end + + ## + # Creates a new data transfer configuration. + # + # @overload create_transfer_config(request, options = nil) + # Pass arguments to `create_transfer_config` via a request object, either of type + # {::Google::Cloud::Bigquery::DataTransfer::V1::CreateTransferConfigRequest} or an equivalent Hash. + # + # @param request [::Google::Cloud::Bigquery::DataTransfer::V1::CreateTransferConfigRequest, ::Hash] + # A request object representing the call parameters. Required. To specify no + # parameters, or to keep all the default parameter values, pass an empty Hash. + # @param options [::Gapic::CallOptions, ::Hash] + # Overrides the default settings for this call, e.g, timeout, retries etc. Optional. + # + # @overload create_transfer_config(parent: nil, transfer_config: nil, authorization_code: nil, version_info: nil, service_account_name: nil) + # Pass arguments to `create_transfer_config` via keyword arguments. Note that at + # least one keyword argument is required. To specify no parameters, or to keep all + # the default parameter values, pass an empty Hash as a request object (see above). + # + # @param parent [::String] + # Required. The BigQuery project id where the transfer configuration should + # be created. Must be in the format + # projects/\\{project_id}/locations/\\{location_id} or projects/\\{project_id}. If + # specified location and location of the destination bigquery dataset do not + # match - the request will fail. + # @param transfer_config [::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig, ::Hash] + # Required. Data transfer configuration to create. + # @param authorization_code [::String] + # Deprecated: Authorization code was required when + # `transferConfig.dataSourceId` is 'youtube_channel' but it is no longer used + # in any data sources. Use `version_info` instead. + # + # Optional OAuth2 authorization code to use with this transfer configuration. + # This is required only if `transferConfig.dataSourceId` is 'youtube_channel' + # and new credentials are needed, as indicated by `CheckValidCreds`. In order + # to obtain authorization_code, make a request to the following URL: + #
+                #     https://bigquery.cloud.google.com/datatransfer/oauthz/auth?redirect_uri=urn:ietf:wg:oauth:2.0:oob&response_type=authorization_code&client_id=client_id&scope=data_source_scopes
+                #
+                #     * The client_id is the OAuth client_id of the data source as
+                #       returned by the ListDataSources method.
+                #     * data_source_scopes are the scopes returned by the ListDataSources
+                #       method.
+                #
+                #     Note that this should not be set when `service_account_name` is used to
+                #     create the transfer config.
+                #   @param version_info [::String]
+                #     Optional version info. This parameter replaces `authorization_code` which
+                #     is no longer used in any data sources. This is required only if
+                #     `transferConfig.dataSourceId` is 'youtube_channel' *or* new credentials
+                #     are needed, as indicated by `CheckValidCreds`. In order to obtain version
+                #     info, make a request to the following URL:
+                #
+                #     https://bigquery.cloud.google.com/datatransfer/oauthz/auth?redirect_uri=urn:ietf:wg:oauth:2.0:oob&response_type=version_info&client_id=client_id&scope=data_source_scopes
+                #
+ # * The client_id is the OAuth client_id of the data source as + # returned by ListDataSources method. + # * data_source_scopes are the scopes returned by ListDataSources + # method. + # + # Note that this should not be set when `service_account_name` is used to + # create the transfer config. + # @param service_account_name [::String] + # Optional service account email. If this field is set, the transfer config + # will be created with this service account's credentials. It requires that + # the requesting user calling this API has permissions to act as this service + # account. + # + # Note that not all data sources support service account credentials when + # creating a transfer config. For the latest list of data sources, read about + # [using service + # accounts](https://cloud.google.com/bigquery-transfer/docs/use-service-accounts). + # @yield [result, operation] Access the result along with the TransportOperation object + # @yieldparam result [::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig] + # @yieldparam operation [::Gapic::Rest::TransportOperation] + # + # @return [::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig] + # + # @raise [::Google::Cloud::Error] if the REST call is aborted. + # + # @example Basic example + # require "google/cloud/bigquery/data_transfer/v1" + # + # # Create a client object. The client can be reused for multiple calls. + # client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::Client.new + # + # # Create a request. To set request fields, pass in keyword arguments. + # request = Google::Cloud::Bigquery::DataTransfer::V1::CreateTransferConfigRequest.new + # + # # Call the create_transfer_config method. + # result = client.create_transfer_config request + # + # # The returned object is of type Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig. + # p result + # + def create_transfer_config request, options = nil + raise ::ArgumentError, "request must be provided" if request.nil? + + request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigquery::DataTransfer::V1::CreateTransferConfigRequest + + # Converts hash and nil to an options object + options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h + + # Customize the options with defaults + call_metadata = @config.rpcs.create_transfer_config.metadata.to_h + + # Set x-goog-api-client, x-goog-user-project and x-goog-api-version headers + call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \ + lib_name: @config.lib_name, lib_version: @config.lib_version, + gapic_version: ::Google::Cloud::Bigquery::DataTransfer::V1::VERSION, + transports_version_send: [:rest] + + call_metadata[:"x-goog-api-version"] = API_VERSION unless API_VERSION.empty? + call_metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id + + options.apply_defaults timeout: @config.rpcs.create_transfer_config.timeout, + metadata: call_metadata, + retry_policy: @config.rpcs.create_transfer_config.retry_policy + + options.apply_defaults timeout: @config.timeout, + metadata: @config.metadata, + retry_policy: @config.retry_policy + + @data_transfer_service_stub.create_transfer_config request, options do |result, operation| + yield result, operation if block_given? + return result + end + rescue ::Gapic::Rest::Error => e + raise ::Google::Cloud::Error.from_error(e) + end + + ## + # Updates a data transfer configuration. + # All fields must be set, even if they are not updated. 
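+                #
+                # The sketch below is illustrative only: the resource name and field
+                # values are placeholders, and `client` is assumed to be an initialized
+                # DataTransferService client. `update_mask` lists the fields this request
+                # updates.
+                #
+                # @example Updating only the display name
+                #
+                #   config = ::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig.new(
+                #     name: "projects/my-project/locations/us/transferConfigs/my-config",
+                #     display_name: "Nightly load"
+                #   )
+                #   result = client.update_transfer_config transfer_config: config,
+                #                                          update_mask: { paths: ["display_name"] }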
+                #
+                # @overload update_transfer_config(request, options = nil)
+                #   Pass arguments to `update_transfer_config` via a request object, either of type
+                #   {::Google::Cloud::Bigquery::DataTransfer::V1::UpdateTransferConfigRequest} or an equivalent Hash.
+                #
+                #   @param request [::Google::Cloud::Bigquery::DataTransfer::V1::UpdateTransferConfigRequest, ::Hash]
+                #     A request object representing the call parameters. Required. To specify no
+                #     parameters, or to keep all the default parameter values, pass an empty Hash.
+                #   @param options [::Gapic::CallOptions, ::Hash]
+                #     Overrides the default settings for this call, e.g. timeout, retries, etc. Optional.
+                #
+                # @overload update_transfer_config(transfer_config: nil, authorization_code: nil, update_mask: nil, version_info: nil, service_account_name: nil)
+                #   Pass arguments to `update_transfer_config` via keyword arguments. Note that at
+                #   least one keyword argument is required. To specify no parameters, or to keep all
+                #   the default parameter values, pass an empty Hash as a request object (see above).
+                #
+                #   @param transfer_config [::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig, ::Hash]
+                #     Required. Data transfer configuration to update.
+                #   @param authorization_code [::String]
+                #     Deprecated: Authorization code was required when
+                #     `transferConfig.dataSourceId` is 'youtube_channel' but it is no longer used
+                #     in any data sources. Use `version_info` instead.
+                #
+                #     Optional OAuth2 authorization code to use with this transfer configuration.
+                #     This is required only if `transferConfig.dataSourceId` is 'youtube_channel'
+                #     and new credentials are needed, as indicated by `CheckValidCreds`. In order
+                #     to obtain authorization_code, make a request to the following URL:
+                #
+                #     https://bigquery.cloud.google.com/datatransfer/oauthz/auth?redirect_uri=urn:ietf:wg:oauth:2.0:oob&response_type=authorization_code&client_id=client_id&scope=data_source_scopes
+                #
+                #     * The client_id is the OAuth client_id of the data source as
+                #       returned by the ListDataSources method.
+                #     * data_source_scopes are the scopes returned by the ListDataSources
+                #       method.
+                #
+                #     Note that this should not be set when `service_account_name` is used to
+                #     update the transfer config.
+                #   @param update_mask [::Google::Protobuf::FieldMask, ::Hash]
+                #     Required. The list of fields to update in this request.
+                #   @param version_info [::String]
+                #     Optional version info. This parameter replaces `authorization_code` which
+                #     is no longer used in any data sources. This is required only if
+                #     `transferConfig.dataSourceId` is 'youtube_channel' *or* new credentials
+                #     are needed, as indicated by `CheckValidCreds`. In order to obtain version
+                #     info, make a request to the following URL:
+                #
+                #     https://bigquery.cloud.google.com/datatransfer/oauthz/auth?redirect_uri=urn:ietf:wg:oauth:2.0:oob&response_type=version_info&client_id=client_id&scope=data_source_scopes
+                #
+ # * The client_id is the OAuth client_id of the data source as + # returned by ListDataSources method. + # * data_source_scopes are the scopes returned by ListDataSources + # method. + # + # Note that this should not be set when `service_account_name` is used to + # update the transfer config. + # @param service_account_name [::String] + # Optional service account email. If this field is set, the transfer config + # will be created with this service account's credentials. It requires that + # the requesting user calling this API has permissions to act as this service + # account. + # + # Note that not all data sources support service account credentials when + # creating a transfer config. For the latest list of data sources, read about + # [using service + # accounts](https://cloud.google.com/bigquery-transfer/docs/use-service-accounts). + # @yield [result, operation] Access the result along with the TransportOperation object + # @yieldparam result [::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig] + # @yieldparam operation [::Gapic::Rest::TransportOperation] + # + # @return [::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig] + # + # @raise [::Google::Cloud::Error] if the REST call is aborted. + # + # @example Basic example + # require "google/cloud/bigquery/data_transfer/v1" + # + # # Create a client object. The client can be reused for multiple calls. + # client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::Client.new + # + # # Create a request. To set request fields, pass in keyword arguments. + # request = Google::Cloud::Bigquery::DataTransfer::V1::UpdateTransferConfigRequest.new + # + # # Call the update_transfer_config method. + # result = client.update_transfer_config request + # + # # The returned object is of type Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig. + # p result + # + def update_transfer_config request, options = nil + raise ::ArgumentError, "request must be provided" if request.nil? + + request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigquery::DataTransfer::V1::UpdateTransferConfigRequest + + # Converts hash and nil to an options object + options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h + + # Customize the options with defaults + call_metadata = @config.rpcs.update_transfer_config.metadata.to_h + + # Set x-goog-api-client, x-goog-user-project and x-goog-api-version headers + call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \ + lib_name: @config.lib_name, lib_version: @config.lib_version, + gapic_version: ::Google::Cloud::Bigquery::DataTransfer::V1::VERSION, + transports_version_send: [:rest] + + call_metadata[:"x-goog-api-version"] = API_VERSION unless API_VERSION.empty? + call_metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id + + options.apply_defaults timeout: @config.rpcs.update_transfer_config.timeout, + metadata: call_metadata, + retry_policy: @config.rpcs.update_transfer_config.retry_policy + + options.apply_defaults timeout: @config.timeout, + metadata: @config.metadata, + retry_policy: @config.retry_policy + + @data_transfer_service_stub.update_transfer_config request, options do |result, operation| + yield result, operation if block_given? + return result + end + rescue ::Gapic::Rest::Error => e + raise ::Google::Cloud::Error.from_error(e) + end + + ## + # Deletes a data transfer configuration, including any associated transfer + # runs and logs. 
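+                #
+                # An illustrative one-liner (the resource name is a placeholder), assuming
+                # `client` is an initialized DataTransferService client:
+                #
+                # @example Deleting a transfer config by name
+                #
+                #   client.delete_transfer_config name: "projects/my-project/transferConfigs/my-config"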
+ # + # @overload delete_transfer_config(request, options = nil) + # Pass arguments to `delete_transfer_config` via a request object, either of type + # {::Google::Cloud::Bigquery::DataTransfer::V1::DeleteTransferConfigRequest} or an equivalent Hash. + # + # @param request [::Google::Cloud::Bigquery::DataTransfer::V1::DeleteTransferConfigRequest, ::Hash] + # A request object representing the call parameters. Required. To specify no + # parameters, or to keep all the default parameter values, pass an empty Hash. + # @param options [::Gapic::CallOptions, ::Hash] + # Overrides the default settings for this call, e.g, timeout, retries etc. Optional. + # + # @overload delete_transfer_config(name: nil) + # Pass arguments to `delete_transfer_config` via keyword arguments. Note that at + # least one keyword argument is required. To specify no parameters, or to keep all + # the default parameter values, pass an empty Hash as a request object (see above). + # + # @param name [::String] + # Required. The field will contain name of the resource requested, for + # example: `projects/{project_id}/transferConfigs/{config_id}` or + # `projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}` + # @yield [result, operation] Access the result along with the TransportOperation object + # @yieldparam result [::Google::Protobuf::Empty] + # @yieldparam operation [::Gapic::Rest::TransportOperation] + # + # @return [::Google::Protobuf::Empty] + # + # @raise [::Google::Cloud::Error] if the REST call is aborted. + # + # @example Basic example + # require "google/cloud/bigquery/data_transfer/v1" + # + # # Create a client object. The client can be reused for multiple calls. + # client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::Client.new + # + # # Create a request. To set request fields, pass in keyword arguments. + # request = Google::Cloud::Bigquery::DataTransfer::V1::DeleteTransferConfigRequest.new + # + # # Call the delete_transfer_config method. + # result = client.delete_transfer_config request + # + # # The returned object is of type Google::Protobuf::Empty. + # p result + # + def delete_transfer_config request, options = nil + raise ::ArgumentError, "request must be provided" if request.nil? + + request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigquery::DataTransfer::V1::DeleteTransferConfigRequest + + # Converts hash and nil to an options object + options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h + + # Customize the options with defaults + call_metadata = @config.rpcs.delete_transfer_config.metadata.to_h + + # Set x-goog-api-client, x-goog-user-project and x-goog-api-version headers + call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \ + lib_name: @config.lib_name, lib_version: @config.lib_version, + gapic_version: ::Google::Cloud::Bigquery::DataTransfer::V1::VERSION, + transports_version_send: [:rest] + + call_metadata[:"x-goog-api-version"] = API_VERSION unless API_VERSION.empty? + call_metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id + + options.apply_defaults timeout: @config.rpcs.delete_transfer_config.timeout, + metadata: call_metadata, + retry_policy: @config.rpcs.delete_transfer_config.retry_policy + + options.apply_defaults timeout: @config.timeout, + metadata: @config.metadata, + retry_policy: @config.retry_policy + + @data_transfer_service_stub.delete_transfer_config request, options do |result, operation| + yield result, operation if block_given? 
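+                  # The result (a ::Google::Protobuf::Empty here) is also returned to the caller.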
+ return result + end + rescue ::Gapic::Rest::Error => e + raise ::Google::Cloud::Error.from_error(e) + end + + ## + # Returns information about a data transfer config. + # + # @overload get_transfer_config(request, options = nil) + # Pass arguments to `get_transfer_config` via a request object, either of type + # {::Google::Cloud::Bigquery::DataTransfer::V1::GetTransferConfigRequest} or an equivalent Hash. + # + # @param request [::Google::Cloud::Bigquery::DataTransfer::V1::GetTransferConfigRequest, ::Hash] + # A request object representing the call parameters. Required. To specify no + # parameters, or to keep all the default parameter values, pass an empty Hash. + # @param options [::Gapic::CallOptions, ::Hash] + # Overrides the default settings for this call, e.g, timeout, retries etc. Optional. + # + # @overload get_transfer_config(name: nil) + # Pass arguments to `get_transfer_config` via keyword arguments. Note that at + # least one keyword argument is required. To specify no parameters, or to keep all + # the default parameter values, pass an empty Hash as a request object (see above). + # + # @param name [::String] + # Required. The field will contain name of the resource requested, for + # example: `projects/{project_id}/transferConfigs/{config_id}` or + # `projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}` + # @yield [result, operation] Access the result along with the TransportOperation object + # @yieldparam result [::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig] + # @yieldparam operation [::Gapic::Rest::TransportOperation] + # + # @return [::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig] + # + # @raise [::Google::Cloud::Error] if the REST call is aborted. + # + # @example Basic example + # require "google/cloud/bigquery/data_transfer/v1" + # + # # Create a client object. The client can be reused for multiple calls. + # client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::Client.new + # + # # Create a request. To set request fields, pass in keyword arguments. + # request = Google::Cloud::Bigquery::DataTransfer::V1::GetTransferConfigRequest.new + # + # # Call the get_transfer_config method. + # result = client.get_transfer_config request + # + # # The returned object is of type Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig. + # p result + # + def get_transfer_config request, options = nil + raise ::ArgumentError, "request must be provided" if request.nil? + + request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigquery::DataTransfer::V1::GetTransferConfigRequest + + # Converts hash and nil to an options object + options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h + + # Customize the options with defaults + call_metadata = @config.rpcs.get_transfer_config.metadata.to_h + + # Set x-goog-api-client, x-goog-user-project and x-goog-api-version headers + call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \ + lib_name: @config.lib_name, lib_version: @config.lib_version, + gapic_version: ::Google::Cloud::Bigquery::DataTransfer::V1::VERSION, + transports_version_send: [:rest] + + call_metadata[:"x-goog-api-version"] = API_VERSION unless API_VERSION.empty? 
+ call_metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id + + options.apply_defaults timeout: @config.rpcs.get_transfer_config.timeout, + metadata: call_metadata, + retry_policy: @config.rpcs.get_transfer_config.retry_policy + + options.apply_defaults timeout: @config.timeout, + metadata: @config.metadata, + retry_policy: @config.retry_policy + + @data_transfer_service_stub.get_transfer_config request, options do |result, operation| + yield result, operation if block_given? + return result + end + rescue ::Gapic::Rest::Error => e + raise ::Google::Cloud::Error.from_error(e) + end + + ## + # Returns information about all transfer configs owned by a project in the + # specified location. + # + # @overload list_transfer_configs(request, options = nil) + # Pass arguments to `list_transfer_configs` via a request object, either of type + # {::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferConfigsRequest} or an equivalent Hash. + # + # @param request [::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferConfigsRequest, ::Hash] + # A request object representing the call parameters. Required. To specify no + # parameters, or to keep all the default parameter values, pass an empty Hash. + # @param options [::Gapic::CallOptions, ::Hash] + # Overrides the default settings for this call, e.g, timeout, retries etc. Optional. + # + # @overload list_transfer_configs(parent: nil, data_source_ids: nil, page_token: nil, page_size: nil) + # Pass arguments to `list_transfer_configs` via keyword arguments. Note that at + # least one keyword argument is required. To specify no parameters, or to keep all + # the default parameter values, pass an empty Hash as a request object (see above). + # + # @param parent [::String] + # Required. The BigQuery project id for which transfer configs + # should be returned: `projects/{project_id}` or + # `projects/{project_id}/locations/{location_id}` + # @param data_source_ids [::Array<::String>] + # When specified, only configurations of requested data sources are returned. + # @param page_token [::String] + # Pagination token, which can be used to request a specific page + # of `ListTransfersRequest` list results. For multiple-page + # results, `ListTransfersResponse` outputs + # a `next_page` token, which can be used as the + # `page_token` value to request the next page of list results. + # @param page_size [::Integer] + # Page size. The default page size is the maximum value of 1000 results. + # @yield [result, operation] Access the result along with the TransportOperation object + # @yieldparam result [::Gapic::Rest::PagedEnumerable<::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig>] + # @yieldparam operation [::Gapic::Rest::TransportOperation] + # + # @return [::Gapic::Rest::PagedEnumerable<::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig>] + # + # @raise [::Google::Cloud::Error] if the REST call is aborted. + # + # @example Basic example + # require "google/cloud/bigquery/data_transfer/v1" + # + # # Create a client object. The client can be reused for multiple calls. + # client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::Client.new + # + # # Create a request. To set request fields, pass in keyword arguments. + # request = Google::Cloud::Bigquery::DataTransfer::V1::ListTransferConfigsRequest.new + # + # # Call the list_transfer_configs method. + # result = client.list_transfer_configs request + # + # # The returned object is of type Gapic::PagedEnumerable. 
You can iterate + # # over elements, and API calls will be issued to fetch pages as needed. + # result.each do |item| + # # Each element is of type ::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig. + # p item + # end + # + def list_transfer_configs request, options = nil + raise ::ArgumentError, "request must be provided" if request.nil? + + request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferConfigsRequest + + # Converts hash and nil to an options object + options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h + + # Customize the options with defaults + call_metadata = @config.rpcs.list_transfer_configs.metadata.to_h + + # Set x-goog-api-client, x-goog-user-project and x-goog-api-version headers + call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \ + lib_name: @config.lib_name, lib_version: @config.lib_version, + gapic_version: ::Google::Cloud::Bigquery::DataTransfer::V1::VERSION, + transports_version_send: [:rest] + + call_metadata[:"x-goog-api-version"] = API_VERSION unless API_VERSION.empty? + call_metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id + + options.apply_defaults timeout: @config.rpcs.list_transfer_configs.timeout, + metadata: call_metadata, + retry_policy: @config.rpcs.list_transfer_configs.retry_policy + + options.apply_defaults timeout: @config.timeout, + metadata: @config.metadata, + retry_policy: @config.retry_policy + + @data_transfer_service_stub.list_transfer_configs request, options do |result, operation| + result = ::Gapic::Rest::PagedEnumerable.new @data_transfer_service_stub, :list_transfer_configs, "transfer_configs", request, result, options + yield result, operation if block_given? + return result + end + rescue ::Gapic::Rest::Error => e + raise ::Google::Cloud::Error.from_error(e) + end + + ## + # Creates transfer runs for a time range [start_time, end_time]. + # For each date - or whatever granularity the data source supports - in the + # range, one transfer run is created. + # Note that runs are created per UTC time in the time range. + # DEPRECATED: use StartManualTransferRuns instead. + # + # @overload schedule_transfer_runs(request, options = nil) + # Pass arguments to `schedule_transfer_runs` via a request object, either of type + # {::Google::Cloud::Bigquery::DataTransfer::V1::ScheduleTransferRunsRequest} or an equivalent Hash. + # + # @param request [::Google::Cloud::Bigquery::DataTransfer::V1::ScheduleTransferRunsRequest, ::Hash] + # A request object representing the call parameters. Required. To specify no + # parameters, or to keep all the default parameter values, pass an empty Hash. + # @param options [::Gapic::CallOptions, ::Hash] + # Overrides the default settings for this call, e.g, timeout, retries etc. Optional. + # + # @overload schedule_transfer_runs(parent: nil, start_time: nil, end_time: nil) + # Pass arguments to `schedule_transfer_runs` via keyword arguments. Note that at + # least one keyword argument is required. To specify no parameters, or to keep all + # the default parameter values, pass an empty Hash as a request object (see above). + # + # @param parent [::String] + # Required. Transfer configuration name in the form: + # `projects/{project_id}/transferConfigs/{config_id}` or + # `projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}`. + # @param start_time [::Google::Protobuf::Timestamp, ::Hash] + # Required. Start time of the range of transfer runs. 
For example, + # `"2017-05-25T00:00:00+00:00"`. + # @param end_time [::Google::Protobuf::Timestamp, ::Hash] + # Required. End time of the range of transfer runs. For example, + # `"2017-05-30T00:00:00+00:00"`. + # @yield [result, operation] Access the result along with the TransportOperation object + # @yieldparam result [::Google::Cloud::Bigquery::DataTransfer::V1::ScheduleTransferRunsResponse] + # @yieldparam operation [::Gapic::Rest::TransportOperation] + # + # @return [::Google::Cloud::Bigquery::DataTransfer::V1::ScheduleTransferRunsResponse] + # + # @raise [::Google::Cloud::Error] if the REST call is aborted. + # + # @example Basic example + # require "google/cloud/bigquery/data_transfer/v1" + # + # # Create a client object. The client can be reused for multiple calls. + # client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::Client.new + # + # # Create a request. To set request fields, pass in keyword arguments. + # request = Google::Cloud::Bigquery::DataTransfer::V1::ScheduleTransferRunsRequest.new + # + # # Call the schedule_transfer_runs method. + # result = client.schedule_transfer_runs request + # + # # The returned object is of type Google::Cloud::Bigquery::DataTransfer::V1::ScheduleTransferRunsResponse. + # p result + # + def schedule_transfer_runs request, options = nil + raise ::ArgumentError, "request must be provided" if request.nil? + + request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigquery::DataTransfer::V1::ScheduleTransferRunsRequest + + # Converts hash and nil to an options object + options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h + + # Customize the options with defaults + call_metadata = @config.rpcs.schedule_transfer_runs.metadata.to_h + + # Set x-goog-api-client, x-goog-user-project and x-goog-api-version headers + call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \ + lib_name: @config.lib_name, lib_version: @config.lib_version, + gapic_version: ::Google::Cloud::Bigquery::DataTransfer::V1::VERSION, + transports_version_send: [:rest] + + call_metadata[:"x-goog-api-version"] = API_VERSION unless API_VERSION.empty? + call_metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id + + options.apply_defaults timeout: @config.rpcs.schedule_transfer_runs.timeout, + metadata: call_metadata, + retry_policy: @config.rpcs.schedule_transfer_runs.retry_policy + + options.apply_defaults timeout: @config.timeout, + metadata: @config.metadata, + retry_policy: @config.retry_policy + + @data_transfer_service_stub.schedule_transfer_runs request, options do |result, operation| + yield result, operation if block_given? + return result + end + rescue ::Gapic::Rest::Error => e + raise ::Google::Cloud::Error.from_error(e) + end + + ## + # Start manual transfer runs to be executed now with schedule_time equal to + # current time. The transfer runs can be created for a time range where the + # run_time is between start_time (inclusive) and end_time (exclusive), or for + # a specific run_time. + # + # @overload start_manual_transfer_runs(request, options = nil) + # Pass arguments to `start_manual_transfer_runs` via a request object, either of type + # {::Google::Cloud::Bigquery::DataTransfer::V1::StartManualTransferRunsRequest} or an equivalent Hash. + # + # @param request [::Google::Cloud::Bigquery::DataTransfer::V1::StartManualTransferRunsRequest, ::Hash] + # A request object representing the call parameters. Required. 
To specify no + # parameters, or to keep all the default parameter values, pass an empty Hash. + # @param options [::Gapic::CallOptions, ::Hash] + # Overrides the default settings for this call, e.g, timeout, retries etc. Optional. + # + # @overload start_manual_transfer_runs(parent: nil, requested_time_range: nil, requested_run_time: nil) + # Pass arguments to `start_manual_transfer_runs` via keyword arguments. Note that at + # least one keyword argument is required. To specify no parameters, or to keep all + # the default parameter values, pass an empty Hash as a request object (see above). + # + # @param parent [::String] + # Required. Transfer configuration name in the form: + # `projects/{project_id}/transferConfigs/{config_id}` or + # `projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}`. + # @param requested_time_range [::Google::Cloud::Bigquery::DataTransfer::V1::StartManualTransferRunsRequest::TimeRange, ::Hash] + # A time_range start and end timestamp for historical data files or reports + # that are scheduled to be transferred by the scheduled transfer run. + # requested_time_range must be a past time and cannot include future time + # values. + # @param requested_run_time [::Google::Protobuf::Timestamp, ::Hash] + # A run_time timestamp for historical data files or reports + # that are scheduled to be transferred by the scheduled transfer run. + # requested_run_time must be a past time and cannot include future time + # values. + # @yield [result, operation] Access the result along with the TransportOperation object + # @yieldparam result [::Google::Cloud::Bigquery::DataTransfer::V1::StartManualTransferRunsResponse] + # @yieldparam operation [::Gapic::Rest::TransportOperation] + # + # @return [::Google::Cloud::Bigquery::DataTransfer::V1::StartManualTransferRunsResponse] + # + # @raise [::Google::Cloud::Error] if the REST call is aborted. + # + # @example Basic example + # require "google/cloud/bigquery/data_transfer/v1" + # + # # Create a client object. The client can be reused for multiple calls. + # client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::Client.new + # + # # Create a request. To set request fields, pass in keyword arguments. + # request = Google::Cloud::Bigquery::DataTransfer::V1::StartManualTransferRunsRequest.new + # + # # Call the start_manual_transfer_runs method. + # result = client.start_manual_transfer_runs request + # + # # The returned object is of type Google::Cloud::Bigquery::DataTransfer::V1::StartManualTransferRunsResponse. + # p result + # + def start_manual_transfer_runs request, options = nil + raise ::ArgumentError, "request must be provided" if request.nil? + + request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigquery::DataTransfer::V1::StartManualTransferRunsRequest + + # Converts hash and nil to an options object + options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h + + # Customize the options with defaults + call_metadata = @config.rpcs.start_manual_transfer_runs.metadata.to_h + + # Set x-goog-api-client, x-goog-user-project and x-goog-api-version headers + call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \ + lib_name: @config.lib_name, lib_version: @config.lib_version, + gapic_version: ::Google::Cloud::Bigquery::DataTransfer::V1::VERSION, + transports_version_send: [:rest] + + call_metadata[:"x-goog-api-version"] = API_VERSION unless API_VERSION.empty? 
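+                # Headers assembled here are defaults; metadata supplied explicitly by the
+                # caller in `options` takes precedence when apply_defaults runs below.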
+ call_metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id + + options.apply_defaults timeout: @config.rpcs.start_manual_transfer_runs.timeout, + metadata: call_metadata, + retry_policy: @config.rpcs.start_manual_transfer_runs.retry_policy + + options.apply_defaults timeout: @config.timeout, + metadata: @config.metadata, + retry_policy: @config.retry_policy + + @data_transfer_service_stub.start_manual_transfer_runs request, options do |result, operation| + yield result, operation if block_given? + return result + end + rescue ::Gapic::Rest::Error => e + raise ::Google::Cloud::Error.from_error(e) + end + + ## + # Returns information about the particular transfer run. + # + # @overload get_transfer_run(request, options = nil) + # Pass arguments to `get_transfer_run` via a request object, either of type + # {::Google::Cloud::Bigquery::DataTransfer::V1::GetTransferRunRequest} or an equivalent Hash. + # + # @param request [::Google::Cloud::Bigquery::DataTransfer::V1::GetTransferRunRequest, ::Hash] + # A request object representing the call parameters. Required. To specify no + # parameters, or to keep all the default parameter values, pass an empty Hash. + # @param options [::Gapic::CallOptions, ::Hash] + # Overrides the default settings for this call, e.g, timeout, retries etc. Optional. + # + # @overload get_transfer_run(name: nil) + # Pass arguments to `get_transfer_run` via keyword arguments. Note that at + # least one keyword argument is required. To specify no parameters, or to keep all + # the default parameter values, pass an empty Hash as a request object (see above). + # + # @param name [::String] + # Required. The field will contain name of the resource requested, for + # example: `projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}` + # or + # `projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}/runs/{run_id}` + # @yield [result, operation] Access the result along with the TransportOperation object + # @yieldparam result [::Google::Cloud::Bigquery::DataTransfer::V1::TransferRun] + # @yieldparam operation [::Gapic::Rest::TransportOperation] + # + # @return [::Google::Cloud::Bigquery::DataTransfer::V1::TransferRun] + # + # @raise [::Google::Cloud::Error] if the REST call is aborted. + # + # @example Basic example + # require "google/cloud/bigquery/data_transfer/v1" + # + # # Create a client object. The client can be reused for multiple calls. + # client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::Client.new + # + # # Create a request. To set request fields, pass in keyword arguments. + # request = Google::Cloud::Bigquery::DataTransfer::V1::GetTransferRunRequest.new + # + # # Call the get_transfer_run method. + # result = client.get_transfer_run request + # + # # The returned object is of type Google::Cloud::Bigquery::DataTransfer::V1::TransferRun. + # p result + # + def get_transfer_run request, options = nil + raise ::ArgumentError, "request must be provided" if request.nil? + + request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigquery::DataTransfer::V1::GetTransferRunRequest + + # Converts hash and nil to an options object + options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? 
:to_h + + # Customize the options with defaults + call_metadata = @config.rpcs.get_transfer_run.metadata.to_h + + # Set x-goog-api-client, x-goog-user-project and x-goog-api-version headers + call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \ + lib_name: @config.lib_name, lib_version: @config.lib_version, + gapic_version: ::Google::Cloud::Bigquery::DataTransfer::V1::VERSION, + transports_version_send: [:rest] + + call_metadata[:"x-goog-api-version"] = API_VERSION unless API_VERSION.empty? + call_metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id + + options.apply_defaults timeout: @config.rpcs.get_transfer_run.timeout, + metadata: call_metadata, + retry_policy: @config.rpcs.get_transfer_run.retry_policy + + options.apply_defaults timeout: @config.timeout, + metadata: @config.metadata, + retry_policy: @config.retry_policy + + @data_transfer_service_stub.get_transfer_run request, options do |result, operation| + yield result, operation if block_given? + return result + end + rescue ::Gapic::Rest::Error => e + raise ::Google::Cloud::Error.from_error(e) + end + + ## + # Deletes the specified transfer run. + # + # @overload delete_transfer_run(request, options = nil) + # Pass arguments to `delete_transfer_run` via a request object, either of type + # {::Google::Cloud::Bigquery::DataTransfer::V1::DeleteTransferRunRequest} or an equivalent Hash. + # + # @param request [::Google::Cloud::Bigquery::DataTransfer::V1::DeleteTransferRunRequest, ::Hash] + # A request object representing the call parameters. Required. To specify no + # parameters, or to keep all the default parameter values, pass an empty Hash. + # @param options [::Gapic::CallOptions, ::Hash] + # Overrides the default settings for this call, e.g, timeout, retries etc. Optional. + # + # @overload delete_transfer_run(name: nil) + # Pass arguments to `delete_transfer_run` via keyword arguments. Note that at + # least one keyword argument is required. To specify no parameters, or to keep all + # the default parameter values, pass an empty Hash as a request object (see above). + # + # @param name [::String] + # Required. The field will contain name of the resource requested, for + # example: `projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}` + # or + # `projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}/runs/{run_id}` + # @yield [result, operation] Access the result along with the TransportOperation object + # @yieldparam result [::Google::Protobuf::Empty] + # @yieldparam operation [::Gapic::Rest::TransportOperation] + # + # @return [::Google::Protobuf::Empty] + # + # @raise [::Google::Cloud::Error] if the REST call is aborted. + # + # @example Basic example + # require "google/cloud/bigquery/data_transfer/v1" + # + # # Create a client object. The client can be reused for multiple calls. + # client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::Client.new + # + # # Create a request. To set request fields, pass in keyword arguments. + # request = Google::Cloud::Bigquery::DataTransfer::V1::DeleteTransferRunRequest.new + # + # # Call the delete_transfer_run method. + # result = client.delete_transfer_run request + # + # # The returned object is of type Google::Protobuf::Empty. + # p result + # + def delete_transfer_run request, options = nil + raise ::ArgumentError, "request must be provided" if request.nil? 
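+                # Coerce a plain Hash, if given, into the typed protobuf request object.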
+ + request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigquery::DataTransfer::V1::DeleteTransferRunRequest + + # Converts hash and nil to an options object + options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h + + # Customize the options with defaults + call_metadata = @config.rpcs.delete_transfer_run.metadata.to_h + + # Set x-goog-api-client, x-goog-user-project and x-goog-api-version headers + call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \ + lib_name: @config.lib_name, lib_version: @config.lib_version, + gapic_version: ::Google::Cloud::Bigquery::DataTransfer::V1::VERSION, + transports_version_send: [:rest] + + call_metadata[:"x-goog-api-version"] = API_VERSION unless API_VERSION.empty? + call_metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id + + options.apply_defaults timeout: @config.rpcs.delete_transfer_run.timeout, + metadata: call_metadata, + retry_policy: @config.rpcs.delete_transfer_run.retry_policy + + options.apply_defaults timeout: @config.timeout, + metadata: @config.metadata, + retry_policy: @config.retry_policy + + @data_transfer_service_stub.delete_transfer_run request, options do |result, operation| + yield result, operation if block_given? + return result + end + rescue ::Gapic::Rest::Error => e + raise ::Google::Cloud::Error.from_error(e) + end + + ## + # Returns information about running and completed transfer runs. + # + # @overload list_transfer_runs(request, options = nil) + # Pass arguments to `list_transfer_runs` via a request object, either of type + # {::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferRunsRequest} or an equivalent Hash. + # + # @param request [::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferRunsRequest, ::Hash] + # A request object representing the call parameters. Required. To specify no + # parameters, or to keep all the default parameter values, pass an empty Hash. + # @param options [::Gapic::CallOptions, ::Hash] + # Overrides the default settings for this call, e.g, timeout, retries etc. Optional. + # + # @overload list_transfer_runs(parent: nil, states: nil, page_token: nil, page_size: nil, run_attempt: nil) + # Pass arguments to `list_transfer_runs` via keyword arguments. Note that at + # least one keyword argument is required. To specify no parameters, or to keep all + # the default parameter values, pass an empty Hash as a request object (see above). + # + # @param parent [::String] + # Required. Name of transfer configuration for which transfer runs should be + # retrieved. Format of transfer configuration resource name is: + # `projects/{project_id}/transferConfigs/{config_id}` or + # `projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}`. + # @param states [::Array<::Google::Cloud::Bigquery::DataTransfer::V1::TransferState>] + # When specified, only transfer runs with requested states are returned. + # @param page_token [::String] + # Pagination token, which can be used to request a specific page + # of `ListTransferRunsRequest` list results. For multiple-page + # results, `ListTransferRunsResponse` outputs + # a `next_page` token, which can be used as the + # `page_token` value to request the next page of list results. + # @param page_size [::Integer] + # Page size. The default page size is the maximum value of 1000 results. + # @param run_attempt [::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferRunsRequest::RunAttempt] + # Indicates how run attempts are to be pulled. 
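+                #     See {::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferRunsRequest::RunAttempt}
+                #     for the supported values.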
+ # @yield [result, operation] Access the result along with the TransportOperation object + # @yieldparam result [::Gapic::Rest::PagedEnumerable<::Google::Cloud::Bigquery::DataTransfer::V1::TransferRun>] + # @yieldparam operation [::Gapic::Rest::TransportOperation] + # + # @return [::Gapic::Rest::PagedEnumerable<::Google::Cloud::Bigquery::DataTransfer::V1::TransferRun>] + # + # @raise [::Google::Cloud::Error] if the REST call is aborted. + # + # @example Basic example + # require "google/cloud/bigquery/data_transfer/v1" + # + # # Create a client object. The client can be reused for multiple calls. + # client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::Client.new + # + # # Create a request. To set request fields, pass in keyword arguments. + # request = Google::Cloud::Bigquery::DataTransfer::V1::ListTransferRunsRequest.new + # + # # Call the list_transfer_runs method. + # result = client.list_transfer_runs request + # + # # The returned object is of type Gapic::PagedEnumerable. You can iterate + # # over elements, and API calls will be issued to fetch pages as needed. + # result.each do |item| + # # Each element is of type ::Google::Cloud::Bigquery::DataTransfer::V1::TransferRun. + # p item + # end + # + def list_transfer_runs request, options = nil + raise ::ArgumentError, "request must be provided" if request.nil? + + request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferRunsRequest + + # Converts hash and nil to an options object + options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h + + # Customize the options with defaults + call_metadata = @config.rpcs.list_transfer_runs.metadata.to_h + + # Set x-goog-api-client, x-goog-user-project and x-goog-api-version headers + call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \ + lib_name: @config.lib_name, lib_version: @config.lib_version, + gapic_version: ::Google::Cloud::Bigquery::DataTransfer::V1::VERSION, + transports_version_send: [:rest] + + call_metadata[:"x-goog-api-version"] = API_VERSION unless API_VERSION.empty? + call_metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id + + options.apply_defaults timeout: @config.rpcs.list_transfer_runs.timeout, + metadata: call_metadata, + retry_policy: @config.rpcs.list_transfer_runs.retry_policy + + options.apply_defaults timeout: @config.timeout, + metadata: @config.metadata, + retry_policy: @config.retry_policy + + @data_transfer_service_stub.list_transfer_runs request, options do |result, operation| + result = ::Gapic::Rest::PagedEnumerable.new @data_transfer_service_stub, :list_transfer_runs, "transfer_runs", request, result, options + yield result, operation if block_given? + return result + end + rescue ::Gapic::Rest::Error => e + raise ::Google::Cloud::Error.from_error(e) + end + + ## + # Returns log messages for the transfer run. + # + # @overload list_transfer_logs(request, options = nil) + # Pass arguments to `list_transfer_logs` via a request object, either of type + # {::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferLogsRequest} or an equivalent Hash. + # + # @param request [::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferLogsRequest, ::Hash] + # A request object representing the call parameters. Required. To specify no + # parameters, or to keep all the default parameter values, pass an empty Hash. 
+ # @param options [::Gapic::CallOptions, ::Hash] + # Overrides the default settings for this call, e.g, timeout, retries etc. Optional. + # + # @overload list_transfer_logs(parent: nil, page_token: nil, page_size: nil, message_types: nil) + # Pass arguments to `list_transfer_logs` via keyword arguments. Note that at + # least one keyword argument is required. To specify no parameters, or to keep all + # the default parameter values, pass an empty Hash as a request object (see above). + # + # @param parent [::String] + # Required. Transfer run name in the form: + # `projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}` or + # `projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}/runs/{run_id}` + # @param page_token [::String] + # Pagination token, which can be used to request a specific page + # of `ListTransferLogsRequest` list results. For multiple-page + # results, `ListTransferLogsResponse` outputs + # a `next_page` token, which can be used as the + # `page_token` value to request the next page of list results. + # @param page_size [::Integer] + # Page size. The default page size is the maximum value of 1000 results. + # @param message_types [::Array<::Google::Cloud::Bigquery::DataTransfer::V1::TransferMessage::MessageSeverity>] + # Message types to return. If not populated - INFO, WARNING and ERROR + # messages are returned. + # @yield [result, operation] Access the result along with the TransportOperation object + # @yieldparam result [::Gapic::Rest::PagedEnumerable<::Google::Cloud::Bigquery::DataTransfer::V1::TransferMessage>] + # @yieldparam operation [::Gapic::Rest::TransportOperation] + # + # @return [::Gapic::Rest::PagedEnumerable<::Google::Cloud::Bigquery::DataTransfer::V1::TransferMessage>] + # + # @raise [::Google::Cloud::Error] if the REST call is aborted. + # + # @example Basic example + # require "google/cloud/bigquery/data_transfer/v1" + # + # # Create a client object. The client can be reused for multiple calls. + # client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::Client.new + # + # # Create a request. To set request fields, pass in keyword arguments. + # request = Google::Cloud::Bigquery::DataTransfer::V1::ListTransferLogsRequest.new + # + # # Call the list_transfer_logs method. + # result = client.list_transfer_logs request + # + # # The returned object is of type Gapic::PagedEnumerable. You can iterate + # # over elements, and API calls will be issued to fetch pages as needed. + # result.each do |item| + # # Each element is of type ::Google::Cloud::Bigquery::DataTransfer::V1::TransferMessage. + # p item + # end + # + def list_transfer_logs request, options = nil + raise ::ArgumentError, "request must be provided" if request.nil? + + request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferLogsRequest + + # Converts hash and nil to an options object + options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h + + # Customize the options with defaults + call_metadata = @config.rpcs.list_transfer_logs.metadata.to_h + + # Set x-goog-api-client, x-goog-user-project and x-goog-api-version headers + call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \ + lib_name: @config.lib_name, lib_version: @config.lib_version, + gapic_version: ::Google::Cloud::Bigquery::DataTransfer::V1::VERSION, + transports_version_send: [:rest] + + call_metadata[:"x-goog-api-version"] = API_VERSION unless API_VERSION.empty? 
+ call_metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id + + options.apply_defaults timeout: @config.rpcs.list_transfer_logs.timeout, + metadata: call_metadata, + retry_policy: @config.rpcs.list_transfer_logs.retry_policy + + options.apply_defaults timeout: @config.timeout, + metadata: @config.metadata, + retry_policy: @config.retry_policy + + @data_transfer_service_stub.list_transfer_logs request, options do |result, operation| + result = ::Gapic::Rest::PagedEnumerable.new @data_transfer_service_stub, :list_transfer_logs, "transfer_messages", request, result, options + yield result, operation if block_given? + return result + end + rescue ::Gapic::Rest::Error => e + raise ::Google::Cloud::Error.from_error(e) + end + + ## + # Returns true if valid credentials exist for the given data source and + # requesting user. + # + # @overload check_valid_creds(request, options = nil) + # Pass arguments to `check_valid_creds` via a request object, either of type + # {::Google::Cloud::Bigquery::DataTransfer::V1::CheckValidCredsRequest} or an equivalent Hash. + # + # @param request [::Google::Cloud::Bigquery::DataTransfer::V1::CheckValidCredsRequest, ::Hash] + # A request object representing the call parameters. Required. To specify no + # parameters, or to keep all the default parameter values, pass an empty Hash. + # @param options [::Gapic::CallOptions, ::Hash] + # Overrides the default settings for this call, e.g, timeout, retries etc. Optional. + # + # @overload check_valid_creds(name: nil) + # Pass arguments to `check_valid_creds` via keyword arguments. Note that at + # least one keyword argument is required. To specify no parameters, or to keep all + # the default parameter values, pass an empty Hash as a request object (see above). + # + # @param name [::String] + # Required. The data source in the form: + # `projects/{project_id}/dataSources/{data_source_id}` or + # `projects/{project_id}/locations/{location_id}/dataSources/{data_source_id}`. + # @yield [result, operation] Access the result along with the TransportOperation object + # @yieldparam result [::Google::Cloud::Bigquery::DataTransfer::V1::CheckValidCredsResponse] + # @yieldparam operation [::Gapic::Rest::TransportOperation] + # + # @return [::Google::Cloud::Bigquery::DataTransfer::V1::CheckValidCredsResponse] + # + # @raise [::Google::Cloud::Error] if the REST call is aborted. + # + # @example Basic example + # require "google/cloud/bigquery/data_transfer/v1" + # + # # Create a client object. The client can be reused for multiple calls. + # client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::Client.new + # + # # Create a request. To set request fields, pass in keyword arguments. + # request = Google::Cloud::Bigquery::DataTransfer::V1::CheckValidCredsRequest.new + # + # # Call the check_valid_creds method. + # result = client.check_valid_creds request + # + # # The returned object is of type Google::Cloud::Bigquery::DataTransfer::V1::CheckValidCredsResponse. + # p result + # + def check_valid_creds request, options = nil + raise ::ArgumentError, "request must be provided" if request.nil? + + request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigquery::DataTransfer::V1::CheckValidCredsRequest + + # Converts hash and nil to an options object + options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? 
:to_h + + # Customize the options with defaults + call_metadata = @config.rpcs.check_valid_creds.metadata.to_h + + # Set x-goog-api-client, x-goog-user-project and x-goog-api-version headers + call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \ + lib_name: @config.lib_name, lib_version: @config.lib_version, + gapic_version: ::Google::Cloud::Bigquery::DataTransfer::V1::VERSION, + transports_version_send: [:rest] + + call_metadata[:"x-goog-api-version"] = API_VERSION unless API_VERSION.empty? + call_metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id + + options.apply_defaults timeout: @config.rpcs.check_valid_creds.timeout, + metadata: call_metadata, + retry_policy: @config.rpcs.check_valid_creds.retry_policy + + options.apply_defaults timeout: @config.timeout, + metadata: @config.metadata, + retry_policy: @config.retry_policy + + @data_transfer_service_stub.check_valid_creds request, options do |result, operation| + yield result, operation if block_given? + return result + end + rescue ::Gapic::Rest::Error => e + raise ::Google::Cloud::Error.from_error(e) + end + + ## + # Enroll data sources in a user project. This allows users to create transfer + # configurations for these data sources. They will also appear in the + # ListDataSources RPC and as such, will appear in the + # [BigQuery UI](https://console.cloud.google.com/bigquery), and the documents + # can be found in the public guide for + # [BigQuery Web UI](https://cloud.google.com/bigquery/bigquery-web-ui) and + # [Data Transfer + # Service](https://cloud.google.com/bigquery/docs/working-with-transfers). + # + # @overload enroll_data_sources(request, options = nil) + # Pass arguments to `enroll_data_sources` via a request object, either of type + # {::Google::Cloud::Bigquery::DataTransfer::V1::EnrollDataSourcesRequest} or an equivalent Hash. + # + # @param request [::Google::Cloud::Bigquery::DataTransfer::V1::EnrollDataSourcesRequest, ::Hash] + # A request object representing the call parameters. Required. To specify no + # parameters, or to keep all the default parameter values, pass an empty Hash. + # @param options [::Gapic::CallOptions, ::Hash] + # Overrides the default settings for this call, e.g, timeout, retries etc. Optional. + # + # @overload enroll_data_sources(name: nil, data_source_ids: nil) + # Pass arguments to `enroll_data_sources` via keyword arguments. Note that at + # least one keyword argument is required. To specify no parameters, or to keep all + # the default parameter values, pass an empty Hash as a request object (see above). + # + # @param name [::String] + # Required. The name of the project resource in the form: + # `projects/{project_id}` + # @param data_source_ids [::Array<::String>] + # Data sources that are enrolled. It is required to provide at least one + # data source id. + # @yield [result, operation] Access the result along with the TransportOperation object + # @yieldparam result [::Google::Protobuf::Empty] + # @yieldparam operation [::Gapic::Rest::TransportOperation] + # + # @return [::Google::Protobuf::Empty] + # + # @raise [::Google::Cloud::Error] if the REST call is aborted. + # + # @example Basic example + # require "google/cloud/bigquery/data_transfer/v1" + # + # # Create a client object. The client can be reused for multiple calls. + # client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::Client.new + # + # # Create a request. To set request fields, pass in keyword arguments. 
+ # request = Google::Cloud::Bigquery::DataTransfer::V1::EnrollDataSourcesRequest.new + # + # # Call the enroll_data_sources method. + # result = client.enroll_data_sources request + # + # # The returned object is of type Google::Protobuf::Empty. + # p result + # + def enroll_data_sources request, options = nil + raise ::ArgumentError, "request must be provided" if request.nil? + + request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigquery::DataTransfer::V1::EnrollDataSourcesRequest + + # Converts hash and nil to an options object + options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h + + # Customize the options with defaults + call_metadata = @config.rpcs.enroll_data_sources.metadata.to_h + + # Set x-goog-api-client, x-goog-user-project and x-goog-api-version headers + call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \ + lib_name: @config.lib_name, lib_version: @config.lib_version, + gapic_version: ::Google::Cloud::Bigquery::DataTransfer::V1::VERSION, + transports_version_send: [:rest] + + call_metadata[:"x-goog-api-version"] = API_VERSION unless API_VERSION.empty? + call_metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id + + options.apply_defaults timeout: @config.rpcs.enroll_data_sources.timeout, + metadata: call_metadata, + retry_policy: @config.rpcs.enroll_data_sources.retry_policy + + options.apply_defaults timeout: @config.timeout, + metadata: @config.metadata, + retry_policy: @config.retry_policy + + @data_transfer_service_stub.enroll_data_sources request, options do |result, operation| + yield result, operation if block_given? + return result + end + rescue ::Gapic::Rest::Error => e + raise ::Google::Cloud::Error.from_error(e) + end + + ## + # Unenroll data sources in a user project. This allows users to remove + # transfer configurations for these data sources. They will no longer appear + # in the ListDataSources RPC and will also no longer appear in the [BigQuery + # UI](https://console.cloud.google.com/bigquery). Data transfer + # configurations of unenrolled data sources will not be scheduled. + # + # @overload unenroll_data_sources(request, options = nil) + # Pass arguments to `unenroll_data_sources` via a request object, either of type + # {::Google::Cloud::Bigquery::DataTransfer::V1::UnenrollDataSourcesRequest} or an equivalent Hash. + # + # @param request [::Google::Cloud::Bigquery::DataTransfer::V1::UnenrollDataSourcesRequest, ::Hash] + # A request object representing the call parameters. Required. To specify no + # parameters, or to keep all the default parameter values, pass an empty Hash. + # @param options [::Gapic::CallOptions, ::Hash] + # Overrides the default settings for this call, e.g., timeout, retries, etc. Optional. + # + # @overload unenroll_data_sources(name: nil, data_source_ids: nil) + # Pass arguments to `unenroll_data_sources` via keyword arguments. Note that at + # least one keyword argument is required. To specify no parameters, or to keep all + # the default parameter values, pass an empty Hash as a request object (see above). + # + # @param name [::String] + # Required. The name of the project resource in the form: + # `projects/{project_id}` + # @param data_source_ids [::Array<::String>] + # Data sources that are unenrolled. It is required to provide at least one + # data source id.
+ # @yield [result, operation] Access the result along with the TransportOperation object + # @yieldparam result [::Google::Protobuf::Empty] + # @yieldparam operation [::Gapic::Rest::TransportOperation] + # + # @return [::Google::Protobuf::Empty] + # + # @raise [::Google::Cloud::Error] if the REST call is aborted. + # + # @example Basic example + # require "google/cloud/bigquery/data_transfer/v1" + # + # # Create a client object. The client can be reused for multiple calls. + # client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::Client.new + # + # # Create a request. To set request fields, pass in keyword arguments. + # request = Google::Cloud::Bigquery::DataTransfer::V1::UnenrollDataSourcesRequest.new + # + # # Call the unenroll_data_sources method. + # result = client.unenroll_data_sources request + # + # # The returned object is of type Google::Protobuf::Empty. + # p result + # + def unenroll_data_sources request, options = nil + raise ::ArgumentError, "request must be provided" if request.nil? + + request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigquery::DataTransfer::V1::UnenrollDataSourcesRequest + + # Converts hash and nil to an options object + options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h + + # Customize the options with defaults + call_metadata = @config.rpcs.unenroll_data_sources.metadata.to_h + + # Set x-goog-api-client, x-goog-user-project and x-goog-api-version headers + call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \ + lib_name: @config.lib_name, lib_version: @config.lib_version, + gapic_version: ::Google::Cloud::Bigquery::DataTransfer::V1::VERSION, + transports_version_send: [:rest] + + call_metadata[:"x-goog-api-version"] = API_VERSION unless API_VERSION.empty? + call_metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id + + options.apply_defaults timeout: @config.rpcs.unenroll_data_sources.timeout, + metadata: call_metadata, + retry_policy: @config.rpcs.unenroll_data_sources.retry_policy + + options.apply_defaults timeout: @config.timeout, + metadata: @config.metadata, + retry_policy: @config.retry_policy + + @data_transfer_service_stub.unenroll_data_sources request, options do |result, operation| + yield result, operation if block_given? + return result + end + rescue ::Gapic::Rest::Error => e + raise ::Google::Cloud::Error.from_error(e) + end + + ## + # Configuration class for the DataTransferService REST API. + # + # This class represents the configuration for DataTransferService REST, + # providing control over timeouts, retry behavior, logging, transport + # parameters, and other low-level controls. Certain parameters can also be + # applied individually to specific RPCs. See + # {::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::Client::Configuration::Rpcs} + # for a list of RPCs that can be configured independently. + # + # Configuration can be applied globally to all clients, or to a single client + # on construction. + # + # @example + # + # # Modify the global config, setting the timeout for + # # get_data_source to 20 seconds, + # # and all remaining timeouts to 10 seconds. + # ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::Client.configure do |config| + # config.timeout = 10.0 + # config.rpcs.get_data_source.timeout = 20.0 + # end + # + # # Apply the above configuration only to a new client. 
+ # client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::Client.new do |config| + # config.timeout = 10.0 + # config.rpcs.get_data_source.timeout = 20.0 + # end + # + # @!attribute [rw] endpoint + # A custom service endpoint, as a hostname or hostname:port. The default is + # nil, indicating to use the default endpoint in the current universe domain. + # @return [::String,nil] + # @!attribute [rw] credentials + # Credentials to send with calls. You may provide any of the following types: + # * (`String`) The path to a service account key file in JSON format + # * (`Hash`) A service account key as a Hash + # * (`Google::Auth::Credentials`) A googleauth credentials object + # (see the [googleauth docs](https://rubydoc.info/gems/googleauth/Google/Auth/Credentials)) + # * (`Signet::OAuth2::Client`) A signet oauth2 client object + # (see the [signet docs](https://rubydoc.info/gems/signet/Signet/OAuth2/Client)) + # * (`nil`) indicating no credentials + # @return [::Object] + # @!attribute [rw] scope + # The OAuth scopes + # @return [::Array<::String>] + # @!attribute [rw] lib_name + # The library name as recorded in instrumentation and logging + # @return [::String] + # @!attribute [rw] lib_version + # The library version as recorded in instrumentation and logging + # @return [::String] + # @!attribute [rw] timeout + # The call timeout in seconds. + # @return [::Numeric] + # @!attribute [rw] metadata + # Additional headers to be sent with the call. + # @return [::Hash{::Symbol=>::String}] + # @!attribute [rw] retry_policy + # The retry policy. The value is a hash with the following keys: + # * `:initial_delay` (*type:* `Numeric`) - The initial delay in seconds. + # * `:max_delay` (*type:* `Numeric`) - The max delay in seconds. + # * `:multiplier` (*type:* `Numeric`) - The incremental backoff multiplier. + # * `:retry_codes` (*type:* `Array`) - The error codes that should + # trigger a retry. + # @return [::Hash] + # @!attribute [rw] quota_project + # A separate project against which to charge quota. + # @return [::String] + # @!attribute [rw] universe_domain + # The universe domain within which to make requests. This determines the + # default endpoint URL. The default value of nil uses the environment + # universe (usually the default "googleapis.com" universe). + # @return [::String,nil] + # + class Configuration + extend ::Gapic::Config + + # @private + # The endpoint specific to the default "googleapis.com" universe. Deprecated. + DEFAULT_ENDPOINT = "bigquerydatatransfer.googleapis.com" + + config_attr :endpoint, nil, ::String, nil + config_attr :credentials, nil do |value| + allowed = [::String, ::Hash, ::Proc, ::Symbol, ::Google::Auth::Credentials, ::Signet::OAuth2::Client, nil] + allowed.any? { |klass| klass === value } + end + config_attr :scope, nil, ::String, ::Array, nil + config_attr :lib_name, nil, ::String, nil + config_attr :lib_version, nil, ::String, nil + config_attr :timeout, nil, ::Numeric, nil + config_attr :metadata, nil, ::Hash, nil + config_attr :retry_policy, nil, ::Hash, ::Proc, nil + config_attr :quota_project, nil, ::String, nil + config_attr :universe_domain, nil, ::String, nil + + # @private + # Overrides for http bindings for the RPCs of this service + # are only used when this service is used as mixin, and only + # by the host service. 
+ # @return [::Hash{::Symbol=>::Array<::Gapic::Rest::GrpcTranscoder::HttpBinding>}] + config_attr :bindings_override, {}, ::Hash, nil + + # @private + def initialize parent_config = nil + @parent_config = parent_config unless parent_config.nil? + + yield self if block_given? + end + + ## + # Configurations for individual RPCs + # @return [Rpcs] + # + def rpcs + @rpcs ||= begin + parent_rpcs = nil + parent_rpcs = @parent_config.rpcs if defined?(@parent_config) && @parent_config.respond_to?(:rpcs) + Rpcs.new parent_rpcs + end + end + + ## + # Configuration RPC class for the DataTransferService API. + # + # Includes fields providing the configuration for each RPC in this service. + # Each configuration object is of type `Gapic::Config::Method` and includes + # the following configuration fields: + # + # * `timeout` (*type:* `Numeric`) - The call timeout in seconds + # * `metadata` (*type:* `Hash{Symbol=>String}`) - Additional headers + # * `retry_policy` (*type:* `Hash`) - The retry policy. The policy fields + # include the following keys: + # * `:initial_delay` (*type:* `Numeric`) - The initial delay in seconds. + # * `:max_delay` (*type:* `Numeric`) - The max delay in seconds. + # * `:multiplier` (*type:* `Numeric`) - The incremental backoff multiplier. + # * `:retry_codes` (*type:* `Array`) - The error codes that should + # trigger a retry. + # + class Rpcs + ## + # RPC-specific configuration for `get_data_source` + # @return [::Gapic::Config::Method] + # + attr_reader :get_data_source + ## + # RPC-specific configuration for `list_data_sources` + # @return [::Gapic::Config::Method] + # + attr_reader :list_data_sources + ## + # RPC-specific configuration for `create_transfer_config` + # @return [::Gapic::Config::Method] + # + attr_reader :create_transfer_config + ## + # RPC-specific configuration for `update_transfer_config` + # @return [::Gapic::Config::Method] + # + attr_reader :update_transfer_config + ## + # RPC-specific configuration for `delete_transfer_config` + # @return [::Gapic::Config::Method] + # + attr_reader :delete_transfer_config + ## + # RPC-specific configuration for `get_transfer_config` + # @return [::Gapic::Config::Method] + # + attr_reader :get_transfer_config + ## + # RPC-specific configuration for `list_transfer_configs` + # @return [::Gapic::Config::Method] + # + attr_reader :list_transfer_configs + ## + # RPC-specific configuration for `schedule_transfer_runs` + # @return [::Gapic::Config::Method] + # + attr_reader :schedule_transfer_runs + ## + # RPC-specific configuration for `start_manual_transfer_runs` + # @return [::Gapic::Config::Method] + # + attr_reader :start_manual_transfer_runs + ## + # RPC-specific configuration for `get_transfer_run` + # @return [::Gapic::Config::Method] + # + attr_reader :get_transfer_run + ## + # RPC-specific configuration for `delete_transfer_run` + # @return [::Gapic::Config::Method] + # + attr_reader :delete_transfer_run + ## + # RPC-specific configuration for `list_transfer_runs` + # @return [::Gapic::Config::Method] + # + attr_reader :list_transfer_runs + ## + # RPC-specific configuration for `list_transfer_logs` + # @return [::Gapic::Config::Method] + # + attr_reader :list_transfer_logs + ## + # RPC-specific configuration for `check_valid_creds` + # @return [::Gapic::Config::Method] + # + attr_reader :check_valid_creds + ## + # RPC-specific configuration for `enroll_data_sources` + # @return [::Gapic::Config::Method] + # + attr_reader :enroll_data_sources + ## + # RPC-specific configuration for `unenroll_data_sources` + # @return [::Gapic::Config::Method] + # + attr_reader :unenroll_data_sources + + # @private + def initialize parent_rpcs = nil + get_data_source_config = parent_rpcs.get_data_source if parent_rpcs.respond_to? :get_data_source + @get_data_source = ::Gapic::Config::Method.new get_data_source_config + list_data_sources_config = parent_rpcs.list_data_sources if parent_rpcs.respond_to? :list_data_sources + @list_data_sources = ::Gapic::Config::Method.new list_data_sources_config + create_transfer_config_config = parent_rpcs.create_transfer_config if parent_rpcs.respond_to? :create_transfer_config + @create_transfer_config = ::Gapic::Config::Method.new create_transfer_config_config + update_transfer_config_config = parent_rpcs.update_transfer_config if parent_rpcs.respond_to? :update_transfer_config + @update_transfer_config = ::Gapic::Config::Method.new update_transfer_config_config + delete_transfer_config_config = parent_rpcs.delete_transfer_config if parent_rpcs.respond_to? :delete_transfer_config + @delete_transfer_config = ::Gapic::Config::Method.new delete_transfer_config_config + get_transfer_config_config = parent_rpcs.get_transfer_config if parent_rpcs.respond_to? :get_transfer_config + @get_transfer_config = ::Gapic::Config::Method.new get_transfer_config_config + list_transfer_configs_config = parent_rpcs.list_transfer_configs if parent_rpcs.respond_to? :list_transfer_configs + @list_transfer_configs = ::Gapic::Config::Method.new list_transfer_configs_config + schedule_transfer_runs_config = parent_rpcs.schedule_transfer_runs if parent_rpcs.respond_to? :schedule_transfer_runs + @schedule_transfer_runs = ::Gapic::Config::Method.new schedule_transfer_runs_config + start_manual_transfer_runs_config = parent_rpcs.start_manual_transfer_runs if parent_rpcs.respond_to? :start_manual_transfer_runs + @start_manual_transfer_runs = ::Gapic::Config::Method.new start_manual_transfer_runs_config + get_transfer_run_config = parent_rpcs.get_transfer_run if parent_rpcs.respond_to? :get_transfer_run + @get_transfer_run = ::Gapic::Config::Method.new get_transfer_run_config + delete_transfer_run_config = parent_rpcs.delete_transfer_run if parent_rpcs.respond_to? :delete_transfer_run + @delete_transfer_run = ::Gapic::Config::Method.new delete_transfer_run_config + list_transfer_runs_config = parent_rpcs.list_transfer_runs if parent_rpcs.respond_to? :list_transfer_runs + @list_transfer_runs = ::Gapic::Config::Method.new list_transfer_runs_config + list_transfer_logs_config = parent_rpcs.list_transfer_logs if parent_rpcs.respond_to? :list_transfer_logs + @list_transfer_logs = ::Gapic::Config::Method.new list_transfer_logs_config + check_valid_creds_config = parent_rpcs.check_valid_creds if parent_rpcs.respond_to? :check_valid_creds + @check_valid_creds = ::Gapic::Config::Method.new check_valid_creds_config + enroll_data_sources_config = parent_rpcs.enroll_data_sources if parent_rpcs.respond_to? :enroll_data_sources + @enroll_data_sources = ::Gapic::Config::Method.new enroll_data_sources_config + unenroll_data_sources_config = parent_rpcs.unenroll_data_sources if parent_rpcs.respond_to? :unenroll_data_sources + @unenroll_data_sources = ::Gapic::Config::Method.new unenroll_data_sources_config + + yield self if block_given?
+ end + end + end + end + end + end + end + end + end + end +end diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/data_transfer/v1/data_transfer_service/rest/service_stub.rb b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/data_transfer/v1/data_transfer_service/rest/service_stub.rb new file mode 100644 index 000000000000..26ea0d750773 --- /dev/null +++ b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/data_transfer/v1/data_transfer_service/rest/service_stub.rb @@ -0,0 +1,1133 @@ +# frozen_string_literal: true + +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Auto-generated by gapic-generator-ruby. DO NOT EDIT! + +require "google/cloud/bigquery/datatransfer/v1/datatransfer_pb" + +module Google + module Cloud + module Bigquery + module DataTransfer + module V1 + module DataTransferService + module Rest + ## + # REST service stub for the DataTransferService service. + # The service stub contains baseline method implementations, + # including transcoding, making the REST call, and deserializing the response. + # + class ServiceStub + def initialize endpoint:, endpoint_template:, universe_domain:, credentials: + # These require statements are intentionally placed here to initialize + # the REST modules only when they are required. + require "gapic/rest" + + @client_stub = ::Gapic::Rest::ClientStub.new endpoint: endpoint, + endpoint_template: endpoint_template, + universe_domain: universe_domain, + credentials: credentials, + numeric_enums: true, + raise_faraday_errors: false + end + + ## + # The effective universe domain + # + # @return [String] + # + def universe_domain + @client_stub.universe_domain + end + + ## + # The effective endpoint + # + # @return [String] + # + def endpoint + @client_stub.endpoint + end + + ## + # Baseline implementation for the get_data_source REST call + # + # @param request_pb [::Google::Cloud::Bigquery::DataTransfer::V1::GetDataSourceRequest] + # A request object representing the call parameters. Required. + # @param options [::Gapic::CallOptions] + # Overrides the default settings for this call, e.g., timeout, retries, etc. Optional. + # + # @yield [result, operation] Access the result along with the TransportOperation object + # @yieldparam result [::Google::Cloud::Bigquery::DataTransfer::V1::DataSource] + # @yieldparam operation [::Gapic::Rest::TransportOperation] + # + # @return [::Google::Cloud::Bigquery::DataTransfer::V1::DataSource] + # A result object deserialized from the server's reply + def get_data_source request_pb, options = nil + raise ::ArgumentError, "request must be provided" if request_pb.nil? + + verb, uri, query_string_params, body = ServiceStub.transcode_get_data_source_request request_pb + query_string_params = if query_string_params.any?
+ query_string_params.to_h { |p| p.split "=", 2 } + else + {} + end + + response = @client_stub.make_http_request( + verb, + uri: uri, + body: body || "", + params: query_string_params, + options: options + ) + operation = ::Gapic::Rest::TransportOperation.new response + result = ::Google::Cloud::Bigquery::DataTransfer::V1::DataSource.decode_json response.body, ignore_unknown_fields: true + + yield result, operation if block_given? + result + end + + ## + # Baseline implementation for the list_data_sources REST call + # + # @param request_pb [::Google::Cloud::Bigquery::DataTransfer::V1::ListDataSourcesRequest] + # A request object representing the call parameters. Required. + # @param options [::Gapic::CallOptions] + # Overrides the default settings for this call, e.g, timeout, retries etc. Optional. + # + # @yield [result, operation] Access the result along with the TransportOperation object + # @yieldparam result [::Google::Cloud::Bigquery::DataTransfer::V1::ListDataSourcesResponse] + # @yieldparam operation [::Gapic::Rest::TransportOperation] + # + # @return [::Google::Cloud::Bigquery::DataTransfer::V1::ListDataSourcesResponse] + # A result object deserialized from the server's reply + def list_data_sources request_pb, options = nil + raise ::ArgumentError, "request must be provided" if request_pb.nil? + + verb, uri, query_string_params, body = ServiceStub.transcode_list_data_sources_request request_pb + query_string_params = if query_string_params.any? + query_string_params.to_h { |p| p.split "=", 2 } + else + {} + end + + response = @client_stub.make_http_request( + verb, + uri: uri, + body: body || "", + params: query_string_params, + options: options + ) + operation = ::Gapic::Rest::TransportOperation.new response + result = ::Google::Cloud::Bigquery::DataTransfer::V1::ListDataSourcesResponse.decode_json response.body, ignore_unknown_fields: true + + yield result, operation if block_given? + result + end + + ## + # Baseline implementation for the create_transfer_config REST call + # + # @param request_pb [::Google::Cloud::Bigquery::DataTransfer::V1::CreateTransferConfigRequest] + # A request object representing the call parameters. Required. + # @param options [::Gapic::CallOptions] + # Overrides the default settings for this call, e.g, timeout, retries etc. Optional. + # + # @yield [result, operation] Access the result along with the TransportOperation object + # @yieldparam result [::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig] + # @yieldparam operation [::Gapic::Rest::TransportOperation] + # + # @return [::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig] + # A result object deserialized from the server's reply + def create_transfer_config request_pb, options = nil + raise ::ArgumentError, "request must be provided" if request_pb.nil? + + verb, uri, query_string_params, body = ServiceStub.transcode_create_transfer_config_request request_pb + query_string_params = if query_string_params.any? + query_string_params.to_h { |p| p.split "=", 2 } + else + {} + end + + response = @client_stub.make_http_request( + verb, + uri: uri, + body: body || "", + params: query_string_params, + options: options + ) + operation = ::Gapic::Rest::TransportOperation.new response + result = ::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig.decode_json response.body, ignore_unknown_fields: true + + yield result, operation if block_given? 
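+ # Return the decoded TransferConfig whether or not a block was given.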
+ result + end + + ## + # Baseline implementation for the update_transfer_config REST call + # + # @param request_pb [::Google::Cloud::Bigquery::DataTransfer::V1::UpdateTransferConfigRequest] + # A request object representing the call parameters. Required. + # @param options [::Gapic::CallOptions] + # Overrides the default settings for this call, e.g, timeout, retries etc. Optional. + # + # @yield [result, operation] Access the result along with the TransportOperation object + # @yieldparam result [::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig] + # @yieldparam operation [::Gapic::Rest::TransportOperation] + # + # @return [::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig] + # A result object deserialized from the server's reply + def update_transfer_config request_pb, options = nil + raise ::ArgumentError, "request must be provided" if request_pb.nil? + + verb, uri, query_string_params, body = ServiceStub.transcode_update_transfer_config_request request_pb + query_string_params = if query_string_params.any? + query_string_params.to_h { |p| p.split "=", 2 } + else + {} + end + + response = @client_stub.make_http_request( + verb, + uri: uri, + body: body || "", + params: query_string_params, + options: options + ) + operation = ::Gapic::Rest::TransportOperation.new response + result = ::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig.decode_json response.body, ignore_unknown_fields: true + + yield result, operation if block_given? + result + end + + ## + # Baseline implementation for the delete_transfer_config REST call + # + # @param request_pb [::Google::Cloud::Bigquery::DataTransfer::V1::DeleteTransferConfigRequest] + # A request object representing the call parameters. Required. + # @param options [::Gapic::CallOptions] + # Overrides the default settings for this call, e.g, timeout, retries etc. Optional. + # + # @yield [result, operation] Access the result along with the TransportOperation object + # @yieldparam result [::Google::Protobuf::Empty] + # @yieldparam operation [::Gapic::Rest::TransportOperation] + # + # @return [::Google::Protobuf::Empty] + # A result object deserialized from the server's reply + def delete_transfer_config request_pb, options = nil + raise ::ArgumentError, "request must be provided" if request_pb.nil? + + verb, uri, query_string_params, body = ServiceStub.transcode_delete_transfer_config_request request_pb + query_string_params = if query_string_params.any? + query_string_params.to_h { |p| p.split "=", 2 } + else + {} + end + + response = @client_stub.make_http_request( + verb, + uri: uri, + body: body || "", + params: query_string_params, + options: options + ) + operation = ::Gapic::Rest::TransportOperation.new response + result = ::Google::Protobuf::Empty.decode_json response.body, ignore_unknown_fields: true + + yield result, operation if block_given? + result + end + + ## + # Baseline implementation for the get_transfer_config REST call + # + # @param request_pb [::Google::Cloud::Bigquery::DataTransfer::V1::GetTransferConfigRequest] + # A request object representing the call parameters. Required. + # @param options [::Gapic::CallOptions] + # Overrides the default settings for this call, e.g, timeout, retries etc. Optional. 
+ # + # @yield [result, operation] Access the result along with the TransportOperation object + # @yieldparam result [::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig] + # @yieldparam operation [::Gapic::Rest::TransportOperation] + # + # @return [::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig] + # A result object deserialized from the server's reply + def get_transfer_config request_pb, options = nil + raise ::ArgumentError, "request must be provided" if request_pb.nil? + + verb, uri, query_string_params, body = ServiceStub.transcode_get_transfer_config_request request_pb + query_string_params = if query_string_params.any? + query_string_params.to_h { |p| p.split "=", 2 } + else + {} + end + + response = @client_stub.make_http_request( + verb, + uri: uri, + body: body || "", + params: query_string_params, + options: options + ) + operation = ::Gapic::Rest::TransportOperation.new response + result = ::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig.decode_json response.body, ignore_unknown_fields: true + + yield result, operation if block_given? + result + end + + ## + # Baseline implementation for the list_transfer_configs REST call + # + # @param request_pb [::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferConfigsRequest] + # A request object representing the call parameters. Required. + # @param options [::Gapic::CallOptions] + # Overrides the default settings for this call, e.g, timeout, retries etc. Optional. + # + # @yield [result, operation] Access the result along with the TransportOperation object + # @yieldparam result [::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferConfigsResponse] + # @yieldparam operation [::Gapic::Rest::TransportOperation] + # + # @return [::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferConfigsResponse] + # A result object deserialized from the server's reply + def list_transfer_configs request_pb, options = nil + raise ::ArgumentError, "request must be provided" if request_pb.nil? + + verb, uri, query_string_params, body = ServiceStub.transcode_list_transfer_configs_request request_pb + query_string_params = if query_string_params.any? + query_string_params.to_h { |p| p.split "=", 2 } + else + {} + end + + response = @client_stub.make_http_request( + verb, + uri: uri, + body: body || "", + params: query_string_params, + options: options + ) + operation = ::Gapic::Rest::TransportOperation.new response + result = ::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferConfigsResponse.decode_json response.body, ignore_unknown_fields: true + + yield result, operation if block_given? + result + end + + ## + # Baseline implementation for the schedule_transfer_runs REST call + # + # @param request_pb [::Google::Cloud::Bigquery::DataTransfer::V1::ScheduleTransferRunsRequest] + # A request object representing the call parameters. Required. + # @param options [::Gapic::CallOptions] + # Overrides the default settings for this call, e.g, timeout, retries etc. Optional. + # + # @yield [result, operation] Access the result along with the TransportOperation object + # @yieldparam result [::Google::Cloud::Bigquery::DataTransfer::V1::ScheduleTransferRunsResponse] + # @yieldparam operation [::Gapic::Rest::TransportOperation] + # + # @return [::Google::Cloud::Bigquery::DataTransfer::V1::ScheduleTransferRunsResponse] + # A result object deserialized from the server's reply + def schedule_transfer_runs request_pb, options = nil + raise ::ArgumentError, "request must be provided" if request_pb.nil? 
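+ # Transcode the request into an HTTP verb, URI, query string params, and body using the service's gRPC-HTTP bindings; for this RPC the binding is POST /v1/{parent}:scheduleRuns with the remaining fields sent as the JSON body.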
+ + verb, uri, query_string_params, body = ServiceStub.transcode_schedule_transfer_runs_request request_pb + query_string_params = if query_string_params.any? + query_string_params.to_h { |p| p.split "=", 2 } + else + {} + end + + response = @client_stub.make_http_request( + verb, + uri: uri, + body: body || "", + params: query_string_params, + options: options + ) + operation = ::Gapic::Rest::TransportOperation.new response + result = ::Google::Cloud::Bigquery::DataTransfer::V1::ScheduleTransferRunsResponse.decode_json response.body, ignore_unknown_fields: true + + yield result, operation if block_given? + result + end + + ## + # Baseline implementation for the start_manual_transfer_runs REST call + # + # @param request_pb [::Google::Cloud::Bigquery::DataTransfer::V1::StartManualTransferRunsRequest] + # A request object representing the call parameters. Required. + # @param options [::Gapic::CallOptions] + # Overrides the default settings for this call, e.g, timeout, retries etc. Optional. + # + # @yield [result, operation] Access the result along with the TransportOperation object + # @yieldparam result [::Google::Cloud::Bigquery::DataTransfer::V1::StartManualTransferRunsResponse] + # @yieldparam operation [::Gapic::Rest::TransportOperation] + # + # @return [::Google::Cloud::Bigquery::DataTransfer::V1::StartManualTransferRunsResponse] + # A result object deserialized from the server's reply + def start_manual_transfer_runs request_pb, options = nil + raise ::ArgumentError, "request must be provided" if request_pb.nil? + + verb, uri, query_string_params, body = ServiceStub.transcode_start_manual_transfer_runs_request request_pb + query_string_params = if query_string_params.any? + query_string_params.to_h { |p| p.split "=", 2 } + else + {} + end + + response = @client_stub.make_http_request( + verb, + uri: uri, + body: body || "", + params: query_string_params, + options: options + ) + operation = ::Gapic::Rest::TransportOperation.new response + result = ::Google::Cloud::Bigquery::DataTransfer::V1::StartManualTransferRunsResponse.decode_json response.body, ignore_unknown_fields: true + + yield result, operation if block_given? + result + end + + ## + # Baseline implementation for the get_transfer_run REST call + # + # @param request_pb [::Google::Cloud::Bigquery::DataTransfer::V1::GetTransferRunRequest] + # A request object representing the call parameters. Required. + # @param options [::Gapic::CallOptions] + # Overrides the default settings for this call, e.g, timeout, retries etc. Optional. + # + # @yield [result, operation] Access the result along with the TransportOperation object + # @yieldparam result [::Google::Cloud::Bigquery::DataTransfer::V1::TransferRun] + # @yieldparam operation [::Gapic::Rest::TransportOperation] + # + # @return [::Google::Cloud::Bigquery::DataTransfer::V1::TransferRun] + # A result object deserialized from the server's reply + def get_transfer_run request_pb, options = nil + raise ::ArgumentError, "request must be provided" if request_pb.nil? + + verb, uri, query_string_params, body = ServiceStub.transcode_get_transfer_run_request request_pb + query_string_params = if query_string_params.any? 
+ query_string_params.to_h { |p| p.split "=", 2 } + else + {} + end + + response = @client_stub.make_http_request( + verb, + uri: uri, + body: body || "", + params: query_string_params, + options: options + ) + operation = ::Gapic::Rest::TransportOperation.new response + result = ::Google::Cloud::Bigquery::DataTransfer::V1::TransferRun.decode_json response.body, ignore_unknown_fields: true + + yield result, operation if block_given? + result + end + + ## + # Baseline implementation for the delete_transfer_run REST call + # + # @param request_pb [::Google::Cloud::Bigquery::DataTransfer::V1::DeleteTransferRunRequest] + # A request object representing the call parameters. Required. + # @param options [::Gapic::CallOptions] + # Overrides the default settings for this call, e.g, timeout, retries etc. Optional. + # + # @yield [result, operation] Access the result along with the TransportOperation object + # @yieldparam result [::Google::Protobuf::Empty] + # @yieldparam operation [::Gapic::Rest::TransportOperation] + # + # @return [::Google::Protobuf::Empty] + # A result object deserialized from the server's reply + def delete_transfer_run request_pb, options = nil + raise ::ArgumentError, "request must be provided" if request_pb.nil? + + verb, uri, query_string_params, body = ServiceStub.transcode_delete_transfer_run_request request_pb + query_string_params = if query_string_params.any? + query_string_params.to_h { |p| p.split "=", 2 } + else + {} + end + + response = @client_stub.make_http_request( + verb, + uri: uri, + body: body || "", + params: query_string_params, + options: options + ) + operation = ::Gapic::Rest::TransportOperation.new response + result = ::Google::Protobuf::Empty.decode_json response.body, ignore_unknown_fields: true + + yield result, operation if block_given? + result + end + + ## + # Baseline implementation for the list_transfer_runs REST call + # + # @param request_pb [::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferRunsRequest] + # A request object representing the call parameters. Required. + # @param options [::Gapic::CallOptions] + # Overrides the default settings for this call, e.g, timeout, retries etc. Optional. + # + # @yield [result, operation] Access the result along with the TransportOperation object + # @yieldparam result [::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferRunsResponse] + # @yieldparam operation [::Gapic::Rest::TransportOperation] + # + # @return [::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferRunsResponse] + # A result object deserialized from the server's reply + def list_transfer_runs request_pb, options = nil + raise ::ArgumentError, "request must be provided" if request_pb.nil? + + verb, uri, query_string_params, body = ServiceStub.transcode_list_transfer_runs_request request_pb + query_string_params = if query_string_params.any? + query_string_params.to_h { |p| p.split "=", 2 } + else + {} + end + + response = @client_stub.make_http_request( + verb, + uri: uri, + body: body || "", + params: query_string_params, + options: options + ) + operation = ::Gapic::Rest::TransportOperation.new response + result = ::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferRunsResponse.decode_json response.body, ignore_unknown_fields: true + + yield result, operation if block_given? + result + end + + ## + # Baseline implementation for the list_transfer_logs REST call + # + # @param request_pb [::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferLogsRequest] + # A request object representing the call parameters. 
Required. + # @param options [::Gapic::CallOptions] + # Overrides the default settings for this call, e.g, timeout, retries etc. Optional. + # + # @yield [result, operation] Access the result along with the TransportOperation object + # @yieldparam result [::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferLogsResponse] + # @yieldparam operation [::Gapic::Rest::TransportOperation] + # + # @return [::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferLogsResponse] + # A result object deserialized from the server's reply + def list_transfer_logs request_pb, options = nil + raise ::ArgumentError, "request must be provided" if request_pb.nil? + + verb, uri, query_string_params, body = ServiceStub.transcode_list_transfer_logs_request request_pb + query_string_params = if query_string_params.any? + query_string_params.to_h { |p| p.split "=", 2 } + else + {} + end + + response = @client_stub.make_http_request( + verb, + uri: uri, + body: body || "", + params: query_string_params, + options: options + ) + operation = ::Gapic::Rest::TransportOperation.new response + result = ::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferLogsResponse.decode_json response.body, ignore_unknown_fields: true + + yield result, operation if block_given? + result + end + + ## + # Baseline implementation for the check_valid_creds REST call + # + # @param request_pb [::Google::Cloud::Bigquery::DataTransfer::V1::CheckValidCredsRequest] + # A request object representing the call parameters. Required. + # @param options [::Gapic::CallOptions] + # Overrides the default settings for this call, e.g, timeout, retries etc. Optional. + # + # @yield [result, operation] Access the result along with the TransportOperation object + # @yieldparam result [::Google::Cloud::Bigquery::DataTransfer::V1::CheckValidCredsResponse] + # @yieldparam operation [::Gapic::Rest::TransportOperation] + # + # @return [::Google::Cloud::Bigquery::DataTransfer::V1::CheckValidCredsResponse] + # A result object deserialized from the server's reply + def check_valid_creds request_pb, options = nil + raise ::ArgumentError, "request must be provided" if request_pb.nil? + + verb, uri, query_string_params, body = ServiceStub.transcode_check_valid_creds_request request_pb + query_string_params = if query_string_params.any? + query_string_params.to_h { |p| p.split "=", 2 } + else + {} + end + + response = @client_stub.make_http_request( + verb, + uri: uri, + body: body || "", + params: query_string_params, + options: options + ) + operation = ::Gapic::Rest::TransportOperation.new response + result = ::Google::Cloud::Bigquery::DataTransfer::V1::CheckValidCredsResponse.decode_json response.body, ignore_unknown_fields: true + + yield result, operation if block_given? + result + end + + ## + # Baseline implementation for the enroll_data_sources REST call + # + # @param request_pb [::Google::Cloud::Bigquery::DataTransfer::V1::EnrollDataSourcesRequest] + # A request object representing the call parameters. Required. + # @param options [::Gapic::CallOptions] + # Overrides the default settings for this call, e.g, timeout, retries etc. Optional. 
+ # + # @yield [result, operation] Access the result along with the TransportOperation object + # @yieldparam result [::Google::Protobuf::Empty] + # @yieldparam operation [::Gapic::Rest::TransportOperation] + # + # @return [::Google::Protobuf::Empty] + # A result object deserialized from the server's reply + def enroll_data_sources request_pb, options = nil + raise ::ArgumentError, "request must be provided" if request_pb.nil? + + verb, uri, query_string_params, body = ServiceStub.transcode_enroll_data_sources_request request_pb + query_string_params = if query_string_params.any? + query_string_params.to_h { |p| p.split "=", 2 } + else + {} + end + + response = @client_stub.make_http_request( + verb, + uri: uri, + body: body || "", + params: query_string_params, + options: options + ) + operation = ::Gapic::Rest::TransportOperation.new response + result = ::Google::Protobuf::Empty.decode_json response.body, ignore_unknown_fields: true + + yield result, operation if block_given? + result + end + + ## + # Baseline implementation for the unenroll_data_sources REST call + # + # @param request_pb [::Google::Cloud::Bigquery::DataTransfer::V1::UnenrollDataSourcesRequest] + # A request object representing the call parameters. Required. + # @param options [::Gapic::CallOptions] + # Overrides the default settings for this call, e.g, timeout, retries etc. Optional. + # + # @yield [result, operation] Access the result along with the TransportOperation object + # @yieldparam result [::Google::Protobuf::Empty] + # @yieldparam operation [::Gapic::Rest::TransportOperation] + # + # @return [::Google::Protobuf::Empty] + # A result object deserialized from the server's reply + def unenroll_data_sources request_pb, options = nil + raise ::ArgumentError, "request must be provided" if request_pb.nil? + + verb, uri, query_string_params, body = ServiceStub.transcode_unenroll_data_sources_request request_pb + query_string_params = if query_string_params.any? + query_string_params.to_h { |p| p.split "=", 2 } + else + {} + end + + response = @client_stub.make_http_request( + verb, + uri: uri, + body: body || "", + params: query_string_params, + options: options + ) + operation = ::Gapic::Rest::TransportOperation.new response + result = ::Google::Protobuf::Empty.decode_json response.body, ignore_unknown_fields: true + + yield result, operation if block_given? + result + end + + ## + # @private + # + # GRPC transcoding helper method for the get_data_source REST call + # + # @param request_pb [::Google::Cloud::Bigquery::DataTransfer::V1::GetDataSourceRequest] + # A request object representing the call parameters. Required. + # @return [Array(String, [String, nil], Hash{String => String})] + # Uri, Body, Query string parameters + def self.transcode_get_data_source_request request_pb + transcoder = Gapic::Rest::GrpcTranscoder.new + .with_bindings( + uri_method: :get, + uri_template: "/v1/{name}", + matches: [ + ["name", %r{^projects/[^/]+/locations/[^/]+/dataSources/[^/]+/?$}, false] + ] + ) + .with_bindings( + uri_method: :get, + uri_template: "/v1/{name}", + matches: [ + ["name", %r{^projects/[^/]+/dataSources/[^/]+/?$}, false] + ] + ) + transcoder.transcode request_pb + end + + ## + # @private + # + # GRPC transcoding helper method for the list_data_sources REST call + # + # @param request_pb [::Google::Cloud::Bigquery::DataTransfer::V1::ListDataSourcesRequest] + # A request object representing the call parameters. Required. 
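+ #   (The transcoder selects whichever binding's `matches` regex fits the request's `parent` field: the location-scoped or the project-scoped URI.)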
+ # @return [Array(String, [String, nil], Hash{String => String})] + # Uri, Body, Query string parameters + def self.transcode_list_data_sources_request request_pb + transcoder = Gapic::Rest::GrpcTranscoder.new + .with_bindings( + uri_method: :get, + uri_template: "/v1/{parent}/dataSources", + matches: [ + ["parent", %r{^projects/[^/]+/locations/[^/]+/?$}, false] + ] + ) + .with_bindings( + uri_method: :get, + uri_template: "/v1/{parent}/dataSources", + matches: [ + ["parent", %r{^projects/[^/]+/?$}, false] + ] + ) + transcoder.transcode request_pb + end + + ## + # @private + # + # GRPC transcoding helper method for the create_transfer_config REST call + # + # @param request_pb [::Google::Cloud::Bigquery::DataTransfer::V1::CreateTransferConfigRequest] + # A request object representing the call parameters. Required. + # @return [Array(String, [String, nil], Hash{String => String})] + # Uri, Body, Query string parameters + def self.transcode_create_transfer_config_request request_pb + transcoder = Gapic::Rest::GrpcTranscoder.new + .with_bindings( + uri_method: :post, + uri_template: "/v1/{parent}/transferConfigs", + body: "transfer_config", + matches: [ + ["parent", %r{^projects/[^/]+/locations/[^/]+/?$}, false] + ] + ) + .with_bindings( + uri_method: :post, + uri_template: "/v1/{parent}/transferConfigs", + body: "transfer_config", + matches: [ + ["parent", %r{^projects/[^/]+/?$}, false] + ] + ) + transcoder.transcode request_pb + end + + ## + # @private + # + # GRPC transcoding helper method for the update_transfer_config REST call + # + # @param request_pb [::Google::Cloud::Bigquery::DataTransfer::V1::UpdateTransferConfigRequest] + # A request object representing the call parameters. Required. + # @return [Array(String, [String, nil], Hash{String => String})] + # Uri, Body, Query string parameters + def self.transcode_update_transfer_config_request request_pb + transcoder = Gapic::Rest::GrpcTranscoder.new + .with_bindings( + uri_method: :patch, + uri_template: "/v1/{transfer_config.name}", + body: "transfer_config", + matches: [ + ["transfer_config.name", %r{^projects/[^/]+/locations/[^/]+/transferConfigs/[^/]+/?$}, false] + ] + ) + .with_bindings( + uri_method: :patch, + uri_template: "/v1/{transfer_config.name}", + body: "transfer_config", + matches: [ + ["transfer_config.name", %r{^projects/[^/]+/transferConfigs/[^/]+/?$}, false] + ] + ) + transcoder.transcode request_pb + end + + ## + # @private + # + # GRPC transcoding helper method for the delete_transfer_config REST call + # + # @param request_pb [::Google::Cloud::Bigquery::DataTransfer::V1::DeleteTransferConfigRequest] + # A request object representing the call parameters. Required. + # @return [Array(String, [String, nil], Hash{String => String})] + # Uri, Body, Query string parameters + def self.transcode_delete_transfer_config_request request_pb + transcoder = Gapic::Rest::GrpcTranscoder.new + .with_bindings( + uri_method: :delete, + uri_template: "/v1/{name}", + matches: [ + ["name", %r{^projects/[^/]+/locations/[^/]+/transferConfigs/[^/]+/?$}, false] + ] + ) + .with_bindings( + uri_method: :delete, + uri_template: "/v1/{name}", + matches: [ + ["name", %r{^projects/[^/]+/transferConfigs/[^/]+/?$}, false] + ] + ) + transcoder.transcode request_pb + end + + ## + # @private + # + # GRPC transcoding helper method for the get_transfer_config REST call + # + # @param request_pb [::Google::Cloud::Bigquery::DataTransfer::V1::GetTransferConfigRequest] + # A request object representing the call parameters. Required. 
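+ #   (Both bindings here are plain GETs on /v1/{name}; unlike create and update, no request body is sent.)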
+ # @return [Array(String, [String, nil], Hash{String => String})] + # Uri, Body, Query string parameters + def self.transcode_get_transfer_config_request request_pb + transcoder = Gapic::Rest::GrpcTranscoder.new + .with_bindings( + uri_method: :get, + uri_template: "/v1/{name}", + matches: [ + ["name", %r{^projects/[^/]+/locations/[^/]+/transferConfigs/[^/]+/?$}, false] + ] + ) + .with_bindings( + uri_method: :get, + uri_template: "/v1/{name}", + matches: [ + ["name", %r{^projects/[^/]+/transferConfigs/[^/]+/?$}, false] + ] + ) + transcoder.transcode request_pb + end + + ## + # @private + # + # GRPC transcoding helper method for the list_transfer_configs REST call + # + # @param request_pb [::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferConfigsRequest] + # A request object representing the call parameters. Required. + # @return [Array(String, [String, nil], Hash{String => String})] + # Uri, Body, Query string parameters + def self.transcode_list_transfer_configs_request request_pb + transcoder = Gapic::Rest::GrpcTranscoder.new + .with_bindings( + uri_method: :get, + uri_template: "/v1/{parent}/transferConfigs", + matches: [ + ["parent", %r{^projects/[^/]+/locations/[^/]+/?$}, false] + ] + ) + .with_bindings( + uri_method: :get, + uri_template: "/v1/{parent}/transferConfigs", + matches: [ + ["parent", %r{^projects/[^/]+/?$}, false] + ] + ) + transcoder.transcode request_pb + end + + ## + # @private + # + # GRPC transcoding helper method for the schedule_transfer_runs REST call + # + # @param request_pb [::Google::Cloud::Bigquery::DataTransfer::V1::ScheduleTransferRunsRequest] + # A request object representing the call parameters. Required. + # @return [Array(String, [String, nil], Hash{String => String})] + # Uri, Body, Query string parameters + def self.transcode_schedule_transfer_runs_request request_pb + transcoder = Gapic::Rest::GrpcTranscoder.new + .with_bindings( + uri_method: :post, + uri_template: "/v1/{parent}:scheduleRuns", + body: "*", + matches: [ + ["parent", %r{^projects/[^/]+/locations/[^/]+/transferConfigs/[^/]+/?$}, false] + ] + ) + .with_bindings( + uri_method: :post, + uri_template: "/v1/{parent}:scheduleRuns", + body: "*", + matches: [ + ["parent", %r{^projects/[^/]+/transferConfigs/[^/]+/?$}, false] + ] + ) + transcoder.transcode request_pb + end + + ## + # @private + # + # GRPC transcoding helper method for the start_manual_transfer_runs REST call + # + # @param request_pb [::Google::Cloud::Bigquery::DataTransfer::V1::StartManualTransferRunsRequest] + # A request object representing the call parameters. Required. + # @return [Array(String, [String, nil], Hash{String => String})] + # Uri, Body, Query string parameters + def self.transcode_start_manual_transfer_runs_request request_pb + transcoder = Gapic::Rest::GrpcTranscoder.new + .with_bindings( + uri_method: :post, + uri_template: "/v1/{parent}:startManualRuns", + body: "*", + matches: [ + ["parent", %r{^projects/[^/]+/locations/[^/]+/transferConfigs/[^/]+/?$}, false] + ] + ) + .with_bindings( + uri_method: :post, + uri_template: "/v1/{parent}:startManualRuns", + body: "*", + matches: [ + ["parent", %r{^projects/[^/]+/transferConfigs/[^/]+/?$}, false] + ] + ) + transcoder.transcode request_pb + end + + ## + # @private + # + # GRPC transcoding helper method for the get_transfer_run REST call + # + # @param request_pb [::Google::Cloud::Bigquery::DataTransfer::V1::GetTransferRunRequest] + # A request object representing the call parameters. Required. 
+ # @return [Array(String, [String, nil], Hash{String => String})] + # Uri, Body, Query string parameters + def self.transcode_get_transfer_run_request request_pb + transcoder = Gapic::Rest::GrpcTranscoder.new + .with_bindings( + uri_method: :get, + uri_template: "/v1/{name}", + matches: [ + ["name", %r{^projects/[^/]+/locations/[^/]+/transferConfigs/[^/]+/runs/[^/]+/?$}, false] + ] + ) + .with_bindings( + uri_method: :get, + uri_template: "/v1/{name}", + matches: [ + ["name", %r{^projects/[^/]+/transferConfigs/[^/]+/runs/[^/]+/?$}, false] + ] + ) + transcoder.transcode request_pb + end + + ## + # @private + # + # GRPC transcoding helper method for the delete_transfer_run REST call + # + # @param request_pb [::Google::Cloud::Bigquery::DataTransfer::V1::DeleteTransferRunRequest] + # A request object representing the call parameters. Required. + # @return [Array(String, [String, nil], Hash{String => String})] + # Uri, Body, Query string parameters + def self.transcode_delete_transfer_run_request request_pb + transcoder = Gapic::Rest::GrpcTranscoder.new + .with_bindings( + uri_method: :delete, + uri_template: "/v1/{name}", + matches: [ + ["name", %r{^projects/[^/]+/locations/[^/]+/transferConfigs/[^/]+/runs/[^/]+/?$}, false] + ] + ) + .with_bindings( + uri_method: :delete, + uri_template: "/v1/{name}", + matches: [ + ["name", %r{^projects/[^/]+/transferConfigs/[^/]+/runs/[^/]+/?$}, false] + ] + ) + transcoder.transcode request_pb + end + + ## + # @private + # + # GRPC transcoding helper method for the list_transfer_runs REST call + # + # @param request_pb [::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferRunsRequest] + # A request object representing the call parameters. Required. + # @return [Array(String, [String, nil], Hash{String => String})] + # Uri, Body, Query string parameters + def self.transcode_list_transfer_runs_request request_pb + transcoder = Gapic::Rest::GrpcTranscoder.new + .with_bindings( + uri_method: :get, + uri_template: "/v1/{parent}/runs", + matches: [ + ["parent", %r{^projects/[^/]+/locations/[^/]+/transferConfigs/[^/]+/?$}, false] + ] + ) + .with_bindings( + uri_method: :get, + uri_template: "/v1/{parent}/runs", + matches: [ + ["parent", %r{^projects/[^/]+/transferConfigs/[^/]+/?$}, false] + ] + ) + transcoder.transcode request_pb + end + + ## + # @private + # + # GRPC transcoding helper method for the list_transfer_logs REST call + # + # @param request_pb [::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferLogsRequest] + # A request object representing the call parameters. Required. + # @return [Array(String, [String, nil], Hash{String => String})] + # Uri, Body, Query string parameters + def self.transcode_list_transfer_logs_request request_pb + transcoder = Gapic::Rest::GrpcTranscoder.new + .with_bindings( + uri_method: :get, + uri_template: "/v1/{parent}/transferLogs", + matches: [ + ["parent", %r{^projects/[^/]+/locations/[^/]+/transferConfigs/[^/]+/runs/[^/]+/?$}, false] + ] + ) + .with_bindings( + uri_method: :get, + uri_template: "/v1/{parent}/transferLogs", + matches: [ + ["parent", %r{^projects/[^/]+/transferConfigs/[^/]+/runs/[^/]+/?$}, false] + ] + ) + transcoder.transcode request_pb + end + + ## + # @private + # + # GRPC transcoding helper method for the check_valid_creds REST call + # + # @param request_pb [::Google::Cloud::Bigquery::DataTransfer::V1::CheckValidCredsRequest] + # A request object representing the call parameters. Required. 
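+ # @example Illustrative only (hypothetical name): because the binding declares
+ #   body: "*", every request field not bound into the URI is serialized into
+ #   the request body, so this transcodes to
+ #   POST "/v1/projects/my-project/dataSources/my-source:checkValidCreds"
+ #   with an effectively empty JSON body.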
+ # @return [Array(String, [String, nil], Hash{String => String})] + # Uri, Body, Query string parameters + def self.transcode_check_valid_creds_request request_pb + transcoder = Gapic::Rest::GrpcTranscoder.new + .with_bindings( + uri_method: :post, + uri_template: "/v1/{name}:checkValidCreds", + body: "*", + matches: [ + ["name", %r{^projects/[^/]+/locations/[^/]+/dataSources/[^/]+/?$}, false] + ] + ) + .with_bindings( + uri_method: :post, + uri_template: "/v1/{name}:checkValidCreds", + body: "*", + matches: [ + ["name", %r{^projects/[^/]+/dataSources/[^/]+/?$}, false] + ] + ) + transcoder.transcode request_pb + end + + ## + # @private + # + # GRPC transcoding helper method for the enroll_data_sources REST call + # + # @param request_pb [::Google::Cloud::Bigquery::DataTransfer::V1::EnrollDataSourcesRequest] + # A request object representing the call parameters. Required. + # @return [Array(String, [String, nil], Hash{String => String})] + # Uri, Body, Query string parameters + def self.transcode_enroll_data_sources_request request_pb + transcoder = Gapic::Rest::GrpcTranscoder.new + .with_bindings( + uri_method: :post, + uri_template: "/v1/{name}:enrollDataSources", + body: "*", + matches: [ + ["name", %r{^projects/[^/]+/locations/[^/]+/?$}, false] + ] + ) + .with_bindings( + uri_method: :post, + uri_template: "/v1/{name}:enrollDataSources", + body: "*", + matches: [ + ["name", %r{^projects/[^/]+/?$}, false] + ] + ) + transcoder.transcode request_pb + end + + ## + # @private + # + # GRPC transcoding helper method for the unenroll_data_sources REST call + # + # @param request_pb [::Google::Cloud::Bigquery::DataTransfer::V1::UnenrollDataSourcesRequest] + # A request object representing the call parameters. Required. + # @return [Array(String, [String, nil], Hash{String => String})] + # Uri, Body, Query string parameters + def self.transcode_unenroll_data_sources_request request_pb + transcoder = Gapic::Rest::GrpcTranscoder.new + .with_bindings( + uri_method: :post, + uri_template: "/v1/{name}:unenrollDataSources", + body: "*", + matches: [ + ["name", %r{^projects/[^/]+/locations/[^/]+/?$}, false] + ] + ) + transcoder.transcode request_pb + end + end + end + end + end + end + end + end +end diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/data_transfer/v1/rest.rb b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/data_transfer/v1/rest.rb new file mode 100644 index 000000000000..57c28f8adc68 --- /dev/null +++ b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/data_transfer/v1/rest.rb @@ -0,0 +1,40 @@ +# frozen_string_literal: true + +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Auto-generated by gapic-generator-ruby. DO NOT EDIT! 
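+
+# (Descriptive note, not generated output: the requires below load the REST
+# client and service stub; bindings_override, by GAPIC convention, supplies
+# adjusted HTTP bindings for mixin APIs such as Locations. That purpose is
+# inferred from the file name, not documented in this file.)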
+ +require "google/cloud/bigquery/data_transfer/v1/data_transfer_service/rest" +require "google/cloud/bigquery/data_transfer/v1/bindings_override" +require "google/cloud/bigquery/data_transfer/v1/version" + +module Google + module Cloud + module Bigquery + module DataTransfer + ## + # To load just the REST part of this package, including all its services, and instantiate a REST client: + # + # @example + # + # require "google/cloud/bigquery/data_transfer/v1/rest" + # client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::Client.new + # + module V1 + end + end + end + end +end diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/data_transfer/v1/version.rb b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/data_transfer/v1/version.rb new file mode 100644 index 000000000000..c65948a1833c --- /dev/null +++ b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/data_transfer/v1/version.rb @@ -0,0 +1,30 @@ +# frozen_string_literal: true + +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Auto-generated by gapic-generator-ruby. DO NOT EDIT! + + +module Google + module Cloud + module Bigquery + module DataTransfer + module V1 + VERSION = "0.0.1" + end + end + end + end +end diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/datatransfer/v1/datatransfer_pb.rb b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/datatransfer/v1/datatransfer_pb.rb new file mode 100644 index 000000000000..c01dc686f687 --- /dev/null +++ b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/datatransfer/v1/datatransfer_pb.rb @@ -0,0 +1,89 @@ +# frozen_string_literal: true +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/cloud/bigquery/datatransfer/v1/datatransfer.proto + +require 'google/protobuf' + +require 'google/api/annotations_pb' +require 'google/api/client_pb' +require 'google/api/field_behavior_pb' +require 'google/api/resource_pb' +require 'google/cloud/bigquery/datatransfer/v1/transfer_pb' +require 'google/protobuf/duration_pb' +require 'google/protobuf/empty_pb' +require 'google/protobuf/field_mask_pb' +require 'google/protobuf/timestamp_pb' +require 'google/protobuf/wrappers_pb' + + +descriptor_data = "\n8google/cloud/bigquery/datatransfer/v1/datatransfer.proto\x12%google.cloud.bigquery.datatransfer.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a\x34google/cloud/bigquery/datatransfer/v1/transfer.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/wrappers.proto\"\x8f\x05\n\x13\x44\x61taSourceParameter\x12\x10\n\x08param_id\x18\x01 \x01(\t\x12\x14\n\x0c\x64isplay_name\x18\x02 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x03 \x01(\t\x12M\n\x04type\x18\x04 \x01(\x0e\x32?.google.cloud.bigquery.datatransfer.v1.DataSourceParameter.Type\x12\x10\n\x08required\x18\x05 \x01(\x08\x12\x10\n\x08repeated\x18\x06 \x01(\x08\x12\x18\n\x10validation_regex\x18\x07 \x01(\t\x12\x16\n\x0e\x61llowed_values\x18\x08 \x03(\t\x12/\n\tmin_value\x18\t \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12/\n\tmax_value\x18\n \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12J\n\x06\x66ields\x18\x0b \x03(\x0b\x32:.google.cloud.bigquery.datatransfer.v1.DataSourceParameter\x12\x1e\n\x16validation_description\x18\x0c \x01(\t\x12\x1b\n\x13validation_help_url\x18\r \x01(\t\x12\x11\n\timmutable\x18\x0e \x01(\x08\x12\x0f\n\x07recurse\x18\x0f \x01(\x08\x12\x12\n\ndeprecated\x18\x14 \x01(\x08\"s\n\x04Type\x12\x14\n\x10TYPE_UNSPECIFIED\x10\x00\x12\n\n\x06STRING\x10\x01\x12\x0b\n\x07INTEGER\x10\x02\x12\n\n\x06\x44OUBLE\x10\x03\x12\x0b\n\x07\x42OOLEAN\x10\x04\x12\n\n\x06RECORD\x10\x05\x12\r\n\tPLUS_PAGE\x10\x06\x12\x08\n\x04LIST\x10\x07\"\x9c\t\n\nDataSource\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x03\x12\x16\n\x0e\x64\x61ta_source_id\x18\x02 \x01(\t\x12\x14\n\x0c\x64isplay_name\x18\x03 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x04 \x01(\t\x12\x11\n\tclient_id\x18\x05 \x01(\t\x12\x0e\n\x06scopes\x18\x06 \x03(\t\x12N\n\rtransfer_type\x18\x07 \x01(\x0e\x32\x33.google.cloud.bigquery.datatransfer.v1.TransferTypeB\x02\x18\x01\x12\'\n\x1bsupports_multiple_transfers\x18\x08 \x01(\x08\x42\x02\x18\x01\x12\x1f\n\x17update_deadline_seconds\x18\t \x01(\x05\x12\x18\n\x10\x64\x65\x66\x61ult_schedule\x18\n \x01(\t\x12 \n\x18supports_custom_schedule\x18\x0b \x01(\x08\x12N\n\nparameters\x18\x0c \x03(\x0b\x32:.google.cloud.bigquery.datatransfer.v1.DataSourceParameter\x12\x10\n\x08help_url\x18\r \x01(\t\x12_\n\x12\x61uthorization_type\x18\x0e \x01(\x0e\x32\x43.google.cloud.bigquery.datatransfer.v1.DataSource.AuthorizationType\x12\\\n\x11\x64\x61ta_refresh_type\x18\x0f \x01(\x0e\x32\x41.google.cloud.bigquery.datatransfer.v1.DataSource.DataRefreshType\x12(\n default_data_refresh_window_days\x18\x10 \x01(\x05\x12\x1c\n\x14manual_runs_disabled\x18\x11 \x01(\x08\x12<\n\x19minimum_schedule_interval\x18\x12 
\x01(\x0b\x32\x19.google.protobuf.Duration\"\x8a\x01\n\x11\x41uthorizationType\x12\"\n\x1e\x41UTHORIZATION_TYPE_UNSPECIFIED\x10\x00\x12\x16\n\x12\x41UTHORIZATION_CODE\x10\x01\x12\"\n\x1eGOOGLE_PLUS_AUTHORIZATION_CODE\x10\x02\x12\x15\n\x11\x46IRST_PARTY_OAUTH\x10\x03\"c\n\x0f\x44\x61taRefreshType\x12!\n\x1d\x44\x41TA_REFRESH_TYPE_UNSPECIFIED\x10\x00\x12\x12\n\x0eSLIDING_WINDOW\x10\x01\x12\x19\n\x15\x43USTOM_SLIDING_WINDOW\x10\x02:\xa5\x01\xea\x41\xa1\x01\n.bigquerydatatransfer.googleapis.com/DataSource\x12,projects/{project}/dataSources/{data_source}\x12\x41projects/{project}/locations/{location}/dataSources/{data_source}\"\\\n\x14GetDataSourceRequest\x12\x44\n\x04name\x18\x01 \x01(\tB6\xe0\x41\x02\xfa\x41\x30\n.bigquerydatatransfer.googleapis.com/DataSource\"\x87\x01\n\x16ListDataSourcesRequest\x12\x46\n\x06parent\x18\x01 \x01(\tB6\xe0\x41\x02\xfa\x41\x30\x12.bigquerydatatransfer.googleapis.com/DataSource\x12\x12\n\npage_token\x18\x03 \x01(\t\x12\x11\n\tpage_size\x18\x04 \x01(\x05\"\x80\x01\n\x17ListDataSourcesResponse\x12G\n\x0c\x64\x61ta_sources\x18\x01 \x03(\x0b\x32\x31.google.cloud.bigquery.datatransfer.v1.DataSource\x12\x1c\n\x0fnext_page_token\x18\x02 \x01(\tB\x03\xe0\x41\x03\"\x92\x02\n\x1b\x43reateTransferConfigRequest\x12J\n\x06parent\x18\x01 \x01(\tB:\xe0\x41\x02\xfa\x41\x34\x12\x32\x62igquerydatatransfer.googleapis.com/TransferConfig\x12S\n\x0ftransfer_config\x18\x02 \x01(\x0b\x32\x35.google.cloud.bigquery.datatransfer.v1.TransferConfigB\x03\xe0\x41\x02\x12\x1e\n\x12\x61uthorization_code\x18\x03 \x01(\tB\x02\x18\x01\x12\x14\n\x0cversion_info\x18\x05 \x01(\t\x12\x1c\n\x14service_account_name\x18\x06 \x01(\t\"\xfc\x01\n\x1bUpdateTransferConfigRequest\x12S\n\x0ftransfer_config\x18\x01 \x01(\x0b\x32\x35.google.cloud.bigquery.datatransfer.v1.TransferConfigB\x03\xe0\x41\x02\x12\x1e\n\x12\x61uthorization_code\x18\x03 \x01(\tB\x02\x18\x01\x12\x34\n\x0bupdate_mask\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.FieldMaskB\x03\xe0\x41\x02\x12\x14\n\x0cversion_info\x18\x05 \x01(\t\x12\x1c\n\x14service_account_name\x18\x06 \x01(\t\"d\n\x18GetTransferConfigRequest\x12H\n\x04name\x18\x01 \x01(\tB:\xe0\x41\x02\xfa\x41\x34\n2bigquerydatatransfer.googleapis.com/TransferConfig\"g\n\x1b\x44\x65leteTransferConfigRequest\x12H\n\x04name\x18\x01 \x01(\tB:\xe0\x41\x02\xfa\x41\x34\n2bigquerydatatransfer.googleapis.com/TransferConfig\"V\n\x15GetTransferRunRequest\x12=\n\x04name\x18\x01 \x01(\tB/\xe0\x41\x02\xfa\x41)\n\'bigquerydatatransfer.googleapis.com/Run\"Y\n\x18\x44\x65leteTransferRunRequest\x12=\n\x04name\x18\x01 \x01(\tB/\xe0\x41\x02\xfa\x41)\n\'bigquerydatatransfer.googleapis.com/Run\"\xa8\x01\n\x1aListTransferConfigsRequest\x12J\n\x06parent\x18\x01 \x01(\tB:\xe0\x41\x02\xfa\x41\x34\x12\x32\x62igquerydatatransfer.googleapis.com/TransferConfig\x12\x17\n\x0f\x64\x61ta_source_ids\x18\x02 \x03(\t\x12\x12\n\npage_token\x18\x03 \x01(\t\x12\x11\n\tpage_size\x18\x04 \x01(\x05\"\x91\x01\n\x1bListTransferConfigsResponse\x12T\n\x10transfer_configs\x18\x01 \x03(\x0b\x32\x35.google.cloud.bigquery.datatransfer.v1.TransferConfigB\x03\xe0\x41\x03\x12\x1c\n\x0fnext_page_token\x18\x02 \x01(\tB\x03\xe0\x41\x03\"\xde\x02\n\x17ListTransferRunsRequest\x12?\n\x06parent\x18\x01 \x01(\tB/\xe0\x41\x02\xfa\x41)\x12\'bigquerydatatransfer.googleapis.com/Run\x12\x44\n\x06states\x18\x02 \x03(\x0e\x32\x34.google.cloud.bigquery.datatransfer.v1.TransferState\x12\x12\n\npage_token\x18\x03 \x01(\t\x12\x11\n\tpage_size\x18\x04 \x01(\x05\x12^\n\x0brun_attempt\x18\x05 
\x01(\x0e\x32I.google.cloud.bigquery.datatransfer.v1.ListTransferRunsRequest.RunAttempt\"5\n\nRunAttempt\x12\x1b\n\x17RUN_ATTEMPT_UNSPECIFIED\x10\x00\x12\n\n\x06LATEST\x10\x01\"\x88\x01\n\x18ListTransferRunsResponse\x12N\n\rtransfer_runs\x18\x01 \x03(\x0b\x32\x32.google.cloud.bigquery.datatransfer.v1.TransferRunB\x03\xe0\x41\x03\x12\x1c\n\x0fnext_page_token\x18\x02 \x01(\tB\x03\xe0\x41\x03\"\xe0\x01\n\x17ListTransferLogsRequest\x12?\n\x06parent\x18\x01 \x01(\tB/\xe0\x41\x02\xfa\x41)\n\'bigquerydatatransfer.googleapis.com/Run\x12\x12\n\npage_token\x18\x04 \x01(\t\x12\x11\n\tpage_size\x18\x05 \x01(\x05\x12]\n\rmessage_types\x18\x06 \x03(\x0e\x32\x46.google.cloud.bigquery.datatransfer.v1.TransferMessage.MessageSeverity\"\x90\x01\n\x18ListTransferLogsResponse\x12V\n\x11transfer_messages\x18\x01 \x03(\x0b\x32\x36.google.cloud.bigquery.datatransfer.v1.TransferMessageB\x03\xe0\x41\x03\x12\x1c\n\x0fnext_page_token\x18\x02 \x01(\tB\x03\xe0\x41\x03\"^\n\x16\x43heckValidCredsRequest\x12\x44\n\x04name\x18\x01 \x01(\tB6\xe0\x41\x02\xfa\x41\x30\n.bigquerydatatransfer.googleapis.com/DataSource\"2\n\x17\x43heckValidCredsResponse\x12\x17\n\x0fhas_valid_creds\x18\x01 \x01(\x08\"\xd1\x01\n\x1bScheduleTransferRunsRequest\x12J\n\x06parent\x18\x01 \x01(\tB:\xe0\x41\x02\xfa\x41\x34\n2bigquerydatatransfer.googleapis.com/TransferConfig\x12\x33\n\nstart_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x02\x12\x31\n\x08\x65nd_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x02\"`\n\x1cScheduleTransferRunsResponse\x12@\n\x04runs\x18\x01 \x03(\x0b\x32\x32.google.cloud.bigquery.datatransfer.v1.TransferRun\"\x8a\x03\n\x1eStartManualTransferRunsRequest\x12J\n\x06parent\x18\x01 \x01(\tB:\xe0\x41\x02\xfa\x41\x34\n2bigquerydatatransfer.googleapis.com/TransferConfig\x12o\n\x14requested_time_range\x18\x03 \x01(\x0b\x32O.google.cloud.bigquery.datatransfer.v1.StartManualTransferRunsRequest.TimeRangeH\x00\x12\x38\n\x12requested_run_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x1ai\n\tTimeRange\x12.\n\nstart_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x06\n\x04time\"c\n\x1fStartManualTransferRunsResponse\x12@\n\x04runs\x18\x01 \x03(\x0b\x32\x32.google.cloud.bigquery.datatransfer.v1.TransferRun\"F\n\x18\x45nrollDataSourcesRequest\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x17\n\x0f\x64\x61ta_source_ids\x18\x02 \x03(\t\"H\n\x1aUnenrollDataSourcesRequest\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x17\n\x0f\x64\x61ta_source_ids\x18\x02 
\x03(\t2\xd5\"\n\x13\x44\x61taTransferService\x12\xe6\x01\n\rGetDataSource\x12;.google.cloud.bigquery.datatransfer.v1.GetDataSourceRequest\x1a\x31.google.cloud.bigquery.datatransfer.v1.DataSource\"e\xda\x41\x04name\x82\xd3\xe4\x93\x02X\x12//v1/{name=projects/*/locations/*/dataSources/*}Z%\x12#/v1/{name=projects/*/dataSources/*}\x12\xf9\x01\n\x0fListDataSources\x12=.google.cloud.bigquery.datatransfer.v1.ListDataSourcesRequest\x1a>.google.cloud.bigquery.datatransfer.v1.ListDataSourcesResponse\"g\xda\x41\x06parent\x82\xd3\xe4\x93\x02X\x12//v1/{parent=projects/*/locations/*}/dataSourcesZ%\x12#/v1/{parent=projects/*}/dataSources\x12\xb6\x02\n\x14\x43reateTransferConfig\x12\x42.google.cloud.bigquery.datatransfer.v1.CreateTransferConfigRequest\x1a\x35.google.cloud.bigquery.datatransfer.v1.TransferConfig\"\xa2\x01\xda\x41\x16parent,transfer_config\x82\xd3\xe4\x93\x02\x82\x01\"3/v1/{parent=projects/*/locations/*}/transferConfigs:\x0ftransfer_configZ:\"\'/v1/{parent=projects/*}/transferConfigs:\x0ftransfer_config\x12\xdb\x02\n\x14UpdateTransferConfig\x12\x42.google.cloud.bigquery.datatransfer.v1.UpdateTransferConfigRequest\x1a\x35.google.cloud.bigquery.datatransfer.v1.TransferConfig\"\xc7\x01\xda\x41\x1btransfer_config,update_mask\x82\xd3\xe4\x93\x02\xa2\x01\x32\x43/v1/{transfer_config.name=projects/*/locations/*/transferConfigs/*}:\x0ftransfer_configZJ27/v1/{transfer_config.name=projects/*/transferConfigs/*}:\x0ftransfer_config\x12\xe1\x01\n\x14\x44\x65leteTransferConfig\x12\x42.google.cloud.bigquery.datatransfer.v1.DeleteTransferConfigRequest\x1a\x16.google.protobuf.Empty\"m\xda\x41\x04name\x82\xd3\xe4\x93\x02`*3/v1/{name=projects/*/locations/*/transferConfigs/*}Z)*\'/v1/{name=projects/*/transferConfigs/*}\x12\xfa\x01\n\x11GetTransferConfig\x12?.google.cloud.bigquery.datatransfer.v1.GetTransferConfigRequest\x1a\x35.google.cloud.bigquery.datatransfer.v1.TransferConfig\"m\xda\x41\x04name\x82\xd3\xe4\x93\x02`\x12\x33/v1/{name=projects/*/locations/*/transferConfigs/*}Z)\x12\'/v1/{name=projects/*/transferConfigs/*}\x12\x8d\x02\n\x13ListTransferConfigs\x12\x41.google.cloud.bigquery.datatransfer.v1.ListTransferConfigsRequest\x1a\x42.google.cloud.bigquery.datatransfer.v1.ListTransferConfigsResponse\"o\xda\x41\x06parent\x82\xd3\xe4\x93\x02`\x12\x33/v1/{parent=projects/*/locations/*}/transferConfigsZ)\x12\'/v1/{parent=projects/*}/transferConfigs\x12\xcd\x02\n\x14ScheduleTransferRuns\x12\x42.google.cloud.bigquery.datatransfer.v1.ScheduleTransferRunsRequest\x1a\x43.google.cloud.bigquery.datatransfer.v1.ScheduleTransferRunsResponse\"\xab\x01\x88\x02\x01\xda\x41\x1aparent,start_time,end_time\x82\xd3\xe4\x93\x02\x84\x01\"B/v1/{parent=projects/*/locations/*/transferConfigs/*}:scheduleRuns:\x01*Z;\"6/v1/{parent=projects/*/transferConfigs/*}:scheduleRuns:\x01*\x12\xbc\x02\n\x17StartManualTransferRuns\x12\x45.google.cloud.bigquery.datatransfer.v1.StartManualTransferRunsRequest\x1a\x46.google.cloud.bigquery.datatransfer.v1.StartManualTransferRunsResponse\"\x91\x01\x82\xd3\xe4\x93\x02\x8a\x01\"E/v1/{parent=projects/*/locations/*/transferConfigs/*}:startManualRuns:\x01*Z>\"9/v1/{parent=projects/*/transferConfigs/*}:startManualRuns:\x01*\x12\xff\x01\n\x0eGetTransferRun\x12<.google.cloud.bigquery.datatransfer.v1.GetTransferRunRequest\x1a\x32.google.cloud.bigquery.datatransfer.v1.TransferRun\"{\xda\x41\x04name\x82\xd3\xe4\x93\x02n\x12:/v1/{name=projects/*/locations/*/transferConfigs/*/runs/*}Z0\x12./v1/{name=projects/*/transferConfigs/*/runs/*}\x12\xe9\x01\n\x11\x44\x65leteTransferRun\x12?.google.cloud.bigquery.datatransfe
r.v1.DeleteTransferRunRequest\x1a\x16.google.protobuf.Empty\"{\xda\x41\x04name\x82\xd3\xe4\x93\x02n*:/v1/{name=projects/*/locations/*/transferConfigs/*/runs/*}Z0*./v1/{name=projects/*/transferConfigs/*/runs/*}\x12\x92\x02\n\x10ListTransferRuns\x12>.google.cloud.bigquery.datatransfer.v1.ListTransferRunsRequest\x1a?.google.cloud.bigquery.datatransfer.v1.ListTransferRunsResponse\"}\xda\x41\x06parent\x82\xd3\xe4\x93\x02n\x12:/v1/{parent=projects/*/locations/*/transferConfigs/*}/runsZ0\x12./v1/{parent=projects/*/transferConfigs/*}/runs\x12\xb2\x02\n\x10ListTransferLogs\x12>.google.cloud.bigquery.datatransfer.v1.ListTransferLogsRequest\x1a?.google.cloud.bigquery.datatransfer.v1.ListTransferLogsResponse\"\x9c\x01\xda\x41\x06parent\x82\xd3\xe4\x93\x02\x8c\x01\x12I/v1/{parent=projects/*/locations/*/transferConfigs/*/runs/*}/transferLogsZ?\x12=/v1/{parent=projects/*/transferConfigs/*/runs/*}/transferLogs\x12\x9e\x02\n\x0f\x43heckValidCreds\x12=.google.cloud.bigquery.datatransfer.v1.CheckValidCredsRequest\x1a>.google.cloud.bigquery.datatransfer.v1.CheckValidCredsResponse\"\x8b\x01\xda\x41\x04name\x82\xd3\xe4\x93\x02~\"?/v1/{name=projects/*/locations/*/dataSources/*}:checkValidCreds:\x01*Z8\"3/v1/{name=projects/*/dataSources/*}:checkValidCreds:\x01*\x12\xda\x01\n\x11\x45nrollDataSources\x12?.google.cloud.bigquery.datatransfer.v1.EnrollDataSourcesRequest\x1a\x16.google.protobuf.Empty\"l\x82\xd3\xe4\x93\x02\x66\"3/v1/{name=projects/*/locations/*}:enrollDataSources:\x01*Z,\"\'/v1/{name=projects/*}:enrollDataSources:\x01*\x12\xb2\x01\n\x13UnenrollDataSources\x12\x41.google.cloud.bigquery.datatransfer.v1.UnenrollDataSourcesRequest\x1a\x16.google.protobuf.Empty\"@\x82\xd3\xe4\x93\x02:\"5/v1/{name=projects/*/locations/*}:unenrollDataSources:\x01*\x1aW\xca\x41#bigquerydatatransfer.googleapis.com\xd2\x41.https://www.googleapis.com/auth/cloud-platformB\x8b\x02\n)com.google.cloud.bigquery.datatransfer.v1B\x11\x44\x61taTransferProtoP\x01ZMcloud.google.com/go/bigquery/datatransfer/apiv1/datatransferpb;datatransferpb\xaa\x02%Google.Cloud.BigQuery.DataTransfer.V1\xca\x02%Google\\Cloud\\BigQuery\\DataTransfer\\V1\xea\x02)Google::Cloud::Bigquery::DataTransfer::V1b\x06proto3" + +pool = Google::Protobuf::DescriptorPool.generated_pool + +begin + pool.add_serialized_file(descriptor_data) +rescue TypeError + # Compatibility code: will be removed in the next major version. + require 'google/protobuf/descriptor_pb' + parsed = Google::Protobuf::FileDescriptorProto.decode(descriptor_data) + parsed.clear_dependency + serialized = parsed.class.encode(parsed) + file = pool.add_serialized_file(serialized) + warn "Warning: Protobuf detected an import path issue while loading generated file #{__FILE__}" + imports = [ + ["google.protobuf.DoubleValue", "google/protobuf/wrappers.proto"], + ["google.protobuf.Duration", "google/protobuf/duration.proto"], + ["google.cloud.bigquery.datatransfer.v1.TransferConfig", "google/cloud/bigquery/datatransfer/v1/transfer.proto"], + ["google.protobuf.FieldMask", "google/protobuf/field_mask.proto"], + ["google.protobuf.Timestamp", "google/protobuf/timestamp.proto"], + ] + imports.each do |type_name, expected_filename| + import_file = pool.lookup(type_name).file_descriptor + if import_file.name != expected_filename + warn "- #{file.name} imports #{expected_filename}, but that import was loaded as #{import_file.name}" + end + end + warn "Each proto file must use a consistent fully-qualified name." + warn "This will become an error in the next major version." 
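+  # (Descriptive note, not generated code: this rescue path re-parses the
+  # serialized descriptor, drops its dependency list, and re-adds the file so
+  # loading can proceed even when an import was registered under a different
+  # path; the warnings above report which imports were remapped.)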
+end + +module Google + module Cloud + module Bigquery + module DataTransfer + module V1 + DataSourceParameter = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.DataSourceParameter").msgclass + DataSourceParameter::Type = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.DataSourceParameter.Type").enummodule + DataSource = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.DataSource").msgclass + DataSource::AuthorizationType = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.DataSource.AuthorizationType").enummodule + DataSource::DataRefreshType = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.DataSource.DataRefreshType").enummodule + GetDataSourceRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.GetDataSourceRequest").msgclass + ListDataSourcesRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.ListDataSourcesRequest").msgclass + ListDataSourcesResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.ListDataSourcesResponse").msgclass + CreateTransferConfigRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.CreateTransferConfigRequest").msgclass + UpdateTransferConfigRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.UpdateTransferConfigRequest").msgclass + GetTransferConfigRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.GetTransferConfigRequest").msgclass + DeleteTransferConfigRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.DeleteTransferConfigRequest").msgclass + GetTransferRunRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.GetTransferRunRequest").msgclass + DeleteTransferRunRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.DeleteTransferRunRequest").msgclass + ListTransferConfigsRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.ListTransferConfigsRequest").msgclass + ListTransferConfigsResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.ListTransferConfigsResponse").msgclass + ListTransferRunsRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.ListTransferRunsRequest").msgclass + ListTransferRunsRequest::RunAttempt = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.ListTransferRunsRequest.RunAttempt").enummodule + ListTransferRunsResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.ListTransferRunsResponse").msgclass + ListTransferLogsRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.ListTransferLogsRequest").msgclass + ListTransferLogsResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.ListTransferLogsResponse").msgclass + CheckValidCredsRequest = 
::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.CheckValidCredsRequest").msgclass + CheckValidCredsResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.CheckValidCredsResponse").msgclass + ScheduleTransferRunsRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.ScheduleTransferRunsRequest").msgclass + ScheduleTransferRunsResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.ScheduleTransferRunsResponse").msgclass + StartManualTransferRunsRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.StartManualTransferRunsRequest").msgclass + StartManualTransferRunsRequest::TimeRange = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.StartManualTransferRunsRequest.TimeRange").msgclass + StartManualTransferRunsResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.StartManualTransferRunsResponse").msgclass + EnrollDataSourcesRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.EnrollDataSourcesRequest").msgclass + UnenrollDataSourcesRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.UnenrollDataSourcesRequest").msgclass + end + end + end + end +end diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/datatransfer/v1/datatransfer_services_pb.rb b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/datatransfer/v1/datatransfer_services_pb.rb new file mode 100644 index 000000000000..a6e2ee48a2e0 --- /dev/null +++ b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/datatransfer/v1/datatransfer_services_pb.rb @@ -0,0 +1,99 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! +# Source: google/cloud/bigquery/datatransfer/v1/datatransfer.proto for package 'Google.Cloud.Bigquery.DataTransfer.V1' +# Original file comments: +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +require 'grpc' +require 'google/cloud/bigquery/datatransfer/v1/datatransfer_pb' + +module Google + module Cloud + module Bigquery + module DataTransfer + module V1 + module DataTransferService + # This API allows users to manage their data transfers into BigQuery. + class Service + + include ::GRPC::GenericService + + self.marshal_class_method = :encode + self.unmarshal_class_method = :decode + self.service_name = 'google.cloud.bigquery.datatransfer.v1.DataTransferService' + + # Retrieves a supported data source and returns its settings. + rpc :GetDataSource, ::Google::Cloud::Bigquery::DataTransfer::V1::GetDataSourceRequest, ::Google::Cloud::Bigquery::DataTransfer::V1::DataSource + # Lists supported data sources and returns their settings. 
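+ # (Illustrative sketch, not generated code: using the raw gRPC stub defined
+ # below as `Stub = Service.rpc_stub_class`, a call could look like
+ #   stub = Stub.new "bigquerydatatransfer.googleapis.com:443", creds
+ #   response = stub.list_data_sources request
+ # where `creds` and `request` are assumed to exist; most applications use the
+ # generated DataTransferService::Client wrapper instead.)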
+ rpc :ListDataSources, ::Google::Cloud::Bigquery::DataTransfer::V1::ListDataSourcesRequest, ::Google::Cloud::Bigquery::DataTransfer::V1::ListDataSourcesResponse
+ # Creates a new data transfer configuration.
+ rpc :CreateTransferConfig, ::Google::Cloud::Bigquery::DataTransfer::V1::CreateTransferConfigRequest, ::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig
+ # Updates a data transfer configuration.
+ # All fields must be set, even if they are not updated.
+ rpc :UpdateTransferConfig, ::Google::Cloud::Bigquery::DataTransfer::V1::UpdateTransferConfigRequest, ::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig
+ # Deletes a data transfer configuration, including any associated transfer
+ # runs and logs.
+ rpc :DeleteTransferConfig, ::Google::Cloud::Bigquery::DataTransfer::V1::DeleteTransferConfigRequest, ::Google::Protobuf::Empty
+ # Returns information about a data transfer config.
+ rpc :GetTransferConfig, ::Google::Cloud::Bigquery::DataTransfer::V1::GetTransferConfigRequest, ::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig
+ # Returns information about all transfer configs owned by a project in the
+ # specified location.
+ rpc :ListTransferConfigs, ::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferConfigsRequest, ::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferConfigsResponse
+ # Creates transfer runs for a time range [start_time, end_time].
+ # For each date - or whatever granularity the data source supports - in the
+ # range, one transfer run is created.
+ # Note that runs are created per UTC time in the time range.
+ # DEPRECATED: use StartManualTransferRuns instead.
+ rpc :ScheduleTransferRuns, ::Google::Cloud::Bigquery::DataTransfer::V1::ScheduleTransferRunsRequest, ::Google::Cloud::Bigquery::DataTransfer::V1::ScheduleTransferRunsResponse
+ # Starts manual transfer runs to be executed now with schedule_time equal to
+ # the current time. The transfer runs can be created for a time range where
+ # the run_time is between start_time (inclusive) and end_time (exclusive), or
+ # for a specific run_time.
+ rpc :StartManualTransferRuns, ::Google::Cloud::Bigquery::DataTransfer::V1::StartManualTransferRunsRequest, ::Google::Cloud::Bigquery::DataTransfer::V1::StartManualTransferRunsResponse
+ # Returns information about the specified transfer run.
+ rpc :GetTransferRun, ::Google::Cloud::Bigquery::DataTransfer::V1::GetTransferRunRequest, ::Google::Cloud::Bigquery::DataTransfer::V1::TransferRun
+ # Deletes the specified transfer run.
+ rpc :DeleteTransferRun, ::Google::Cloud::Bigquery::DataTransfer::V1::DeleteTransferRunRequest, ::Google::Protobuf::Empty
+ # Returns information about running and completed transfer runs.
+ rpc :ListTransferRuns, ::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferRunsRequest, ::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferRunsResponse
+ # Returns log messages for the transfer run.
+ rpc :ListTransferLogs, ::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferLogsRequest, ::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferLogsResponse
+ # Returns true if valid credentials exist for the given data source and
+ # requesting user.
+ rpc :CheckValidCreds, ::Google::Cloud::Bigquery::DataTransfer::V1::CheckValidCredsRequest, ::Google::Cloud::Bigquery::DataTransfer::V1::CheckValidCredsResponse
+ # Enrolls data sources in a user project. This allows users to create transfer
+ # configurations for these data sources. 
They will also appear in the
+ # ListDataSources RPC and, as such, in the
+ # [BigQuery UI](https://console.cloud.google.com/bigquery). Documentation can
+ # be found in the public guides for the
+ # [BigQuery Web UI](https://cloud.google.com/bigquery/bigquery-web-ui) and the
+ # [Data Transfer
+ # Service](https://cloud.google.com/bigquery/docs/working-with-transfers).
+ rpc :EnrollDataSources, ::Google::Cloud::Bigquery::DataTransfer::V1::EnrollDataSourcesRequest, ::Google::Protobuf::Empty
+ # Unenrolls data sources in a user project. This allows users to remove
+ # transfer configurations for these data sources. They will no longer appear
+ # in the ListDataSources RPC and will also no longer appear in the [BigQuery
+ # UI](https://console.cloud.google.com/bigquery). Data transfer
+ # configurations of unenrolled data sources will not be scheduled.
+ rpc :UnenrollDataSources, ::Google::Cloud::Bigquery::DataTransfer::V1::UnenrollDataSourcesRequest, ::Google::Protobuf::Empty
+ end
+
+ Stub = Service.rpc_stub_class
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/datatransfer/v1/transfer_pb.rb b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/datatransfer/v1/transfer_pb.rb
new file mode 100644
index 000000000000..2572e94d32a4
--- /dev/null
+++ b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/datatransfer/v1/transfer_pb.rb
@@ -0,0 +1,68 @@
+# frozen_string_literal: true
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/cloud/bigquery/datatransfer/v1/transfer.proto
+
+require 'google/protobuf'
+
+require 'google/api/field_behavior_pb'
+require 'google/api/resource_pb'
+require 'google/protobuf/struct_pb'
+require 'google/protobuf/timestamp_pb'
+require 'google/protobuf/wrappers_pb'
+require 'google/rpc/status_pb'
+
+
+descriptor_data = "\n4google/cloud/bigquery/datatransfer/v1/transfer.proto\x12%google.cloud.bigquery.datatransfer.v1\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/wrappers.proto\x1a\x17google/rpc/status.proto\"0\n\x10\x45mailPreferences\x12\x1c\n\x14\x65nable_failure_email\x18\x01 \x01(\x08\"\x90\x01\n\x0fScheduleOptions\x12\x1f\n\x17\x64isable_auto_scheduling\x18\x03 \x01(\x08\x12.\n\nstart_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"\xa7\x02\n\x11ScheduleOptionsV2\x12W\n\x13time_based_schedule\x18\x01 \x01(\x0b\x32\x38.google.cloud.bigquery.datatransfer.v1.TimeBasedScheduleH\x00\x12P\n\x0fmanual_schedule\x18\x02 \x01(\x0b\x32\x35.google.cloud.bigquery.datatransfer.v1.ManualScheduleH\x00\x12[\n\x15\x65vent_driven_schedule\x18\x03 \x01(\x0b\x32:.google.cloud.bigquery.datatransfer.v1.EventDrivenScheduleH\x00\x42\n\n\x08schedule\"\x83\x01\n\x11TimeBasedSchedule\x12\x10\n\x08schedule\x18\x01 \x01(\t\x12.\n\nstart_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"\x10\n\x0eManualSchedule\"2\n\x13\x45ventDrivenSchedule\x12\x1b\n\x13pubsub_subscription\x18\x01 \x01(\t\"(\n\x08UserInfo\x12\x12\n\x05\x65mail\x18\x01 \x01(\tH\x00\x88\x01\x01\x42\x08\n\x06_email\"\x9b\t\n\x0eTransferConfig\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x08\x12 \n\x16\x64\x65stination_dataset_id\x18\x02 
\x01(\tH\x00\x12\x14\n\x0c\x64isplay_name\x18\x03 \x01(\t\x12\x16\n\x0e\x64\x61ta_source_id\x18\x05 \x01(\t\x12\'\n\x06params\x18\t \x01(\x0b\x32\x17.google.protobuf.Struct\x12\x10\n\x08schedule\x18\x07 \x01(\t\x12P\n\x10schedule_options\x18\x18 \x01(\x0b\x32\x36.google.cloud.bigquery.datatransfer.v1.ScheduleOptions\x12U\n\x13schedule_options_v2\x18\x1f \x01(\x0b\x32\x38.google.cloud.bigquery.datatransfer.v1.ScheduleOptionsV2\x12 \n\x18\x64\x61ta_refresh_window_days\x18\x0c \x01(\x05\x12\x10\n\x08\x64isabled\x18\r \x01(\x08\x12\x34\n\x0bupdate_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12\x36\n\rnext_run_time\x18\x08 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12H\n\x05state\x18\n \x01(\x0e\x32\x34.google.cloud.bigquery.datatransfer.v1.TransferStateB\x03\xe0\x41\x03\x12\x0f\n\x07user_id\x18\x0b \x01(\x03\x12\x1b\n\x0e\x64\x61taset_region\x18\x0e \x01(\tB\x03\xe0\x41\x03\x12!\n\x19notification_pubsub_topic\x18\x0f \x01(\t\x12R\n\x11\x65mail_preferences\x18\x12 \x01(\x0b\x32\x37.google.cloud.bigquery.datatransfer.v1.EmailPreferences\x12M\n\nowner_info\x18\x1b \x01(\x0b\x32/.google.cloud.bigquery.datatransfer.v1.UserInfoB\x03\xe0\x41\x03H\x01\x88\x01\x01\x12`\n\x18\x65ncryption_configuration\x18\x1c \x01(\x0b\x32>.google.cloud.bigquery.datatransfer.v1.EncryptionConfiguration\x12&\n\x05\x65rror\x18 \x01(\x0b\x32\x12.google.rpc.StatusB\x03\xe0\x41\x03:\xb9\x01\xea\x41\xb5\x01\n2bigquerydatatransfer.googleapis.com/TransferConfig\x12\x34projects/{project}/transferConfigs/{transfer_config}\x12Iprojects/{project}/locations/{location}/transferConfigs/{transfer_config}B\r\n\x0b\x64\x65stinationB\r\n\x0b_owner_info\"M\n\x17\x45ncryptionConfiguration\x12\x32\n\x0ckms_key_name\x18\x01 \x01(\x0b\x32\x1c.google.protobuf.StringValue\"\xff\x06\n\x0bTransferRun\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x08\x12\x31\n\rschedule_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08run_time\x18\n \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12(\n\x0c\x65rror_status\x18\x15 \x01(\x0b\x32\x12.google.rpc.Status\x12\x33\n\nstart_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12\x31\n\x08\x65nd_time\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12\x34\n\x0bupdate_time\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12,\n\x06params\x18\t \x01(\x0b\x32\x17.google.protobuf.StructB\x03\xe0\x41\x03\x12%\n\x16\x64\x65stination_dataset_id\x18\x02 \x01(\tB\x03\xe0\x41\x03H\x00\x12\x1b\n\x0e\x64\x61ta_source_id\x18\x07 \x01(\tB\x03\xe0\x41\x03\x12\x43\n\x05state\x18\x08 \x01(\x0e\x32\x34.google.cloud.bigquery.datatransfer.v1.TransferState\x12\x0f\n\x07user_id\x18\x0b \x01(\x03\x12\x15\n\x08schedule\x18\x0c \x01(\tB\x03\xe0\x41\x03\x12&\n\x19notification_pubsub_topic\x18\x17 \x01(\tB\x03\xe0\x41\x03\x12W\n\x11\x65mail_preferences\x18\x19 \x01(\x0b\x32\x37.google.cloud.bigquery.datatransfer.v1.EmailPreferencesB\x03\xe0\x41\x03:\xc4\x01\xea\x41\xc0\x01\n\'bigquerydatatransfer.googleapis.com/Run\x12?projects/{project}/transferConfigs/{transfer_config}/runs/{run}\x12Tprojects/{project}/locations/{location}/transferConfigs/{transfer_config}/runs/{run}B\r\n\x0b\x64\x65stination\"\x8a\x02\n\x0fTransferMessage\x12\x30\n\x0cmessage_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12X\n\x08severity\x18\x02 \x01(\x0e\x32\x46.google.cloud.bigquery.datatransfer.v1.TransferMessage.MessageSeverity\x12\x14\n\x0cmessage_text\x18\x03 \x01(\t\"U\n\x0fMessageSeverity\x12 
\n\x1cMESSAGE_SEVERITY_UNSPECIFIED\x10\x00\x12\x08\n\x04INFO\x10\x01\x12\x0b\n\x07WARNING\x10\x02\x12\t\n\x05\x45RROR\x10\x03*K\n\x0cTransferType\x12\x1d\n\x19TRANSFER_TYPE_UNSPECIFIED\x10\x00\x12\t\n\x05\x42\x41TCH\x10\x01\x12\r\n\tSTREAMING\x10\x02\x1a\x02\x18\x01*s\n\rTransferState\x12\x1e\n\x1aTRANSFER_STATE_UNSPECIFIED\x10\x00\x12\x0b\n\x07PENDING\x10\x02\x12\x0b\n\x07RUNNING\x10\x03\x12\r\n\tSUCCEEDED\x10\x04\x12\n\n\x06\x46\x41ILED\x10\x05\x12\r\n\tCANCELLED\x10\x06\x42\x8f\x02\n)com.google.cloud.bigquery.datatransfer.v1B\rTransferProtoP\x01ZMcloud.google.com/go/bigquery/datatransfer/apiv1/datatransferpb;datatransferpb\xa2\x02\x05GCBDT\xaa\x02%Google.Cloud.BigQuery.DataTransfer.V1\xca\x02%Google\\Cloud\\BigQuery\\DataTransfer\\V1\xea\x02)Google::Cloud::Bigquery::DataTransfer::V1b\x06proto3" + +pool = Google::Protobuf::DescriptorPool.generated_pool + +begin + pool.add_serialized_file(descriptor_data) +rescue TypeError + # Compatibility code: will be removed in the next major version. + require 'google/protobuf/descriptor_pb' + parsed = Google::Protobuf::FileDescriptorProto.decode(descriptor_data) + parsed.clear_dependency + serialized = parsed.class.encode(parsed) + file = pool.add_serialized_file(serialized) + warn "Warning: Protobuf detected an import path issue while loading generated file #{__FILE__}" + imports = [ + ["google.protobuf.Timestamp", "google/protobuf/timestamp.proto"], + ["google.protobuf.Struct", "google/protobuf/struct.proto"], + ["google.rpc.Status", "google/rpc/status.proto"], + ["google.protobuf.StringValue", "google/protobuf/wrappers.proto"], + ] + imports.each do |type_name, expected_filename| + import_file = pool.lookup(type_name).file_descriptor + if import_file.name != expected_filename + warn "- #{file.name} imports #{expected_filename}, but that import was loaded as #{import_file.name}" + end + end + warn "Each proto file must use a consistent fully-qualified name." + warn "This will become an error in the next major version." 
+end + +module Google + module Cloud + module Bigquery + module DataTransfer + module V1 + EmailPreferences = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.EmailPreferences").msgclass + ScheduleOptions = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.ScheduleOptions").msgclass + ScheduleOptionsV2 = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.ScheduleOptionsV2").msgclass + TimeBasedSchedule = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.TimeBasedSchedule").msgclass + ManualSchedule = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.ManualSchedule").msgclass + EventDrivenSchedule = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.EventDrivenSchedule").msgclass + UserInfo = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.UserInfo").msgclass + TransferConfig = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.TransferConfig").msgclass + EncryptionConfiguration = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.EncryptionConfiguration").msgclass + TransferRun = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.TransferRun").msgclass + TransferMessage = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.TransferMessage").msgclass + TransferMessage::MessageSeverity = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.TransferMessage.MessageSeverity").enummodule + TransferType = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.TransferType").enummodule + TransferState = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.TransferState").enummodule + end + end + end + end +end diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/README.md b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/README.md new file mode 100644 index 000000000000..614f3cdea732 --- /dev/null +++ b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/README.md @@ -0,0 +1,4 @@ +# BigQuery Data Transfer Service V1 Protocol Buffer Documentation + +These files are for the YARD documentation of the generated protobuf files. +They are not intended to be required or loaded at runtime. diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/api/client.rb b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/api/client.rb new file mode 100644 index 000000000000..2223d8935069 --- /dev/null +++ b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/api/client.rb @@ -0,0 +1,420 @@ +# frozen_string_literal: true + +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Auto-generated by gapic-generator-ruby. DO NOT EDIT! + + +module Google + module Api + # Required information for every language. + # @!attribute [rw] reference_docs_uri + # @deprecated This field is deprecated and may be removed in the next major version update. + # @return [::String] + # Link to automatically generated reference documentation. Example: + # https://cloud.google.com/nodejs/docs/reference/asset/latest + # @!attribute [rw] destinations + # @return [::Array<::Google::Api::ClientLibraryDestination>] + # The destination where API teams want this client library to be published. + class CommonLanguageSettings + include ::Google::Protobuf::MessageExts + extend ::Google::Protobuf::MessageExts::ClassMethods + end + + # Details about how and where to publish client libraries. + # @!attribute [rw] version + # @return [::String] + # Version of the API to apply these settings to. This is the full protobuf + # package for the API, ending in the version element. + # Examples: "google.cloud.speech.v1" and "google.spanner.admin.database.v1". + # @!attribute [rw] launch_stage + # @return [::Google::Api::LaunchStage] + # Launch stage of this version of the API. + # @!attribute [rw] rest_numeric_enums + # @return [::Boolean] + # When using transport=rest, the client request will encode enums as + # numbers rather than strings. + # @!attribute [rw] java_settings + # @return [::Google::Api::JavaSettings] + # Settings for legacy Java features, supported in the Service YAML. + # @!attribute [rw] cpp_settings + # @return [::Google::Api::CppSettings] + # Settings for C++ client libraries. + # @!attribute [rw] php_settings + # @return [::Google::Api::PhpSettings] + # Settings for PHP client libraries. + # @!attribute [rw] python_settings + # @return [::Google::Api::PythonSettings] + # Settings for Python client libraries. + # @!attribute [rw] node_settings + # @return [::Google::Api::NodeSettings] + # Settings for Node client libraries. + # @!attribute [rw] dotnet_settings + # @return [::Google::Api::DotnetSettings] + # Settings for .NET client libraries. + # @!attribute [rw] ruby_settings + # @return [::Google::Api::RubySettings] + # Settings for Ruby client libraries. + # @!attribute [rw] go_settings + # @return [::Google::Api::GoSettings] + # Settings for Go client libraries. + class ClientLibrarySettings + include ::Google::Protobuf::MessageExts + extend ::Google::Protobuf::MessageExts::ClassMethods + end + + # This message configures the settings for publishing [Google Cloud Client + # libraries](https://cloud.google.com/apis/docs/cloud-client-libraries) + # generated from the service config. + # @!attribute [rw] method_settings + # @return [::Array<::Google::Api::MethodSettings>] + # A list of API method settings, e.g. the behavior for methods that use the + # long-running operation pattern. + # @!attribute [rw] new_issue_uri + # @return [::String] + # Link to a *public* URI where users can report issues. 
Example:
+ # https://issuetracker.google.com/issues/new?component=190865&template=1161103
+ # @!attribute [rw] documentation_uri
+ # @return [::String]
+ # Link to product home page. Example:
+ # https://cloud.google.com/asset-inventory/docs/overview
+ # @!attribute [rw] api_short_name
+ # @return [::String]
+ # Used as a tracking tag when collecting data about the API's developer
+ # relations artifacts like docs, packages delivered to package managers,
+ # etc. Example: "speech".
+ # @!attribute [rw] github_label
+ # @return [::String]
+ # GitHub label to apply to issues and pull requests opened for this API.
+ # @!attribute [rw] codeowner_github_teams
+ # @return [::Array<::String>]
+ # GitHub teams to be added to CODEOWNERS in the directory in GitHub
+ # containing source code for the client libraries for this API.
+ # @!attribute [rw] doc_tag_prefix
+ # @return [::String]
+ # A prefix used in sample code when demarcating regions to be included in
+ # documentation.
+ # @!attribute [rw] organization
+ # @return [::Google::Api::ClientLibraryOrganization]
+ # For whom the client library is being published.
+ # @!attribute [rw] library_settings
+ # @return [::Array<::Google::Api::ClientLibrarySettings>]
+ # Client library settings. If the same version string appears multiple
+ # times in this list, then the last one wins. Settings from earlier
+ # settings with the same version string are discarded.
+ # @!attribute [rw] proto_reference_documentation_uri
+ # @return [::String]
+ # Optional link to proto reference documentation. Example:
+ # https://cloud.google.com/pubsub/lite/docs/reference/rpc
+ # @!attribute [rw] rest_reference_documentation_uri
+ # @return [::String]
+ # Optional link to REST reference documentation. Example:
+ # https://cloud.google.com/pubsub/lite/docs/reference/rest
+ class Publishing
+ include ::Google::Protobuf::MessageExts
+ extend ::Google::Protobuf::MessageExts::ClassMethods
+ end
+
+ # Settings for Java client libraries.
+ # @!attribute [rw] library_package
+ # @return [::String]
+ # The package name to use in Java. Clobbers the java_package option
+ # set in the protobuf. This should be used **only** by APIs
+ # that have already set the language_settings.java.package_name field
+ # in gapic.yaml. API teams should use the protobuf java_package option
+ # where possible.
+ #
+ # Example of a YAML configuration::
+ #
+ #   publishing:
+ #     java_settings:
+ #       library_package: com.google.cloud.pubsub.v1
+ # @!attribute [rw] service_class_names
+ # @return [::Google::Protobuf::Map{::String => ::String}]
+ # Configure the Java class name to use instead of the service's for its
+ # corresponding generated GAPIC client. Keys are fully-qualified
+ # service names as they appear in the protobuf (including the full
+ # protobuf package). This should be used **only** by APIs that have
+ # already set the language_settings.java.interface_names field in
+ # gapic.yaml. API teams should otherwise use the service name as it
+ # appears in the protobuf.
+ #
+ # Example of a YAML configuration::
+ #
+ #   publishing:
+ #     java_settings:
+ #       service_class_names:
+ #         - google.pubsub.v1.Publisher: TopicAdmin
+ #         - google.pubsub.v1.Subscriber: SubscriptionAdmin
+ # @!attribute [rw] common
+ # @return [::Google::Api::CommonLanguageSettings]
+ # Some settings.
+ class JavaSettings + include ::Google::Protobuf::MessageExts + extend ::Google::Protobuf::MessageExts::ClassMethods + + # @!attribute [rw] key + # @return [::String] + # @!attribute [rw] value + # @return [::String] + class ServiceClassNamesEntry + include ::Google::Protobuf::MessageExts + extend ::Google::Protobuf::MessageExts::ClassMethods + end + end + + # Settings for C++ client libraries. + # @!attribute [rw] common + # @return [::Google::Api::CommonLanguageSettings] + # Some settings. + class CppSettings + include ::Google::Protobuf::MessageExts + extend ::Google::Protobuf::MessageExts::ClassMethods + end + + # Settings for Php client libraries. + # @!attribute [rw] common + # @return [::Google::Api::CommonLanguageSettings] + # Some settings. + class PhpSettings + include ::Google::Protobuf::MessageExts + extend ::Google::Protobuf::MessageExts::ClassMethods + end + + # Settings for Python client libraries. + # @!attribute [rw] common + # @return [::Google::Api::CommonLanguageSettings] + # Some settings. + # @!attribute [rw] experimental_features + # @return [::Google::Api::PythonSettings::ExperimentalFeatures] + # Experimental features to be included during client library generation. + class PythonSettings + include ::Google::Protobuf::MessageExts + extend ::Google::Protobuf::MessageExts::ClassMethods + + # Experimental features to be included during client library generation. + # These fields will be deprecated once the feature graduates and is enabled + # by default. + # @!attribute [rw] rest_async_io_enabled + # @return [::Boolean] + # Enables generation of asynchronous REST clients if `rest` transport is + # enabled. By default, asynchronous REST clients will not be generated. + # This feature will be enabled by default 1 month after launching the + # feature in preview packages. + class ExperimentalFeatures + include ::Google::Protobuf::MessageExts + extend ::Google::Protobuf::MessageExts::ClassMethods + end + end + + # Settings for Node client libraries. + # @!attribute [rw] common + # @return [::Google::Api::CommonLanguageSettings] + # Some settings. + class NodeSettings + include ::Google::Protobuf::MessageExts + extend ::Google::Protobuf::MessageExts::ClassMethods + end + + # Settings for Dotnet client libraries. + # @!attribute [rw] common + # @return [::Google::Api::CommonLanguageSettings] + # Some settings. + # @!attribute [rw] renamed_services + # @return [::Google::Protobuf::Map{::String => ::String}] + # Map from original service names to renamed versions. + # This is used when the default generated types + # would cause a naming conflict. (Neither name is + # fully-qualified.) + # Example: Subscriber to SubscriberServiceApi. + # @!attribute [rw] renamed_resources + # @return [::Google::Protobuf::Map{::String => ::String}] + # Map from full resource types to the effective short name + # for the resource. This is used when otherwise resource + # named from different services would cause naming collisions. + # Example entry: + # "datalabeling.googleapis.com/Dataset": "DataLabelingDataset" + # @!attribute [rw] ignored_resources + # @return [::Array<::String>] + # List of full resource types to ignore during generation. + # This is typically used for API-specific Location resources, + # which should be handled by the generator as if they were actually + # the common Location resources. 
+ # Example entry: "documentai.googleapis.com/Location" + # @!attribute [rw] forced_namespace_aliases + # @return [::Array<::String>] + # Namespaces which must be aliased in snippets due to + # a known (but non-generator-predictable) naming collision + # @!attribute [rw] handwritten_signatures + # @return [::Array<::String>] + # Method signatures (in the form "service.method(signature)") + # which are provided separately, so shouldn't be generated. + # Snippets *calling* these methods are still generated, however. + class DotnetSettings + include ::Google::Protobuf::MessageExts + extend ::Google::Protobuf::MessageExts::ClassMethods + + # @!attribute [rw] key + # @return [::String] + # @!attribute [rw] value + # @return [::String] + class RenamedServicesEntry + include ::Google::Protobuf::MessageExts + extend ::Google::Protobuf::MessageExts::ClassMethods + end + + # @!attribute [rw] key + # @return [::String] + # @!attribute [rw] value + # @return [::String] + class RenamedResourcesEntry + include ::Google::Protobuf::MessageExts + extend ::Google::Protobuf::MessageExts::ClassMethods + end + end + + # Settings for Ruby client libraries. + # @!attribute [rw] common + # @return [::Google::Api::CommonLanguageSettings] + # Some settings. + class RubySettings + include ::Google::Protobuf::MessageExts + extend ::Google::Protobuf::MessageExts::ClassMethods + end + + # Settings for Go client libraries. + # @!attribute [rw] common + # @return [::Google::Api::CommonLanguageSettings] + # Some settings. + class GoSettings + include ::Google::Protobuf::MessageExts + extend ::Google::Protobuf::MessageExts::ClassMethods + end + + # Describes the generator configuration for a method. + # @!attribute [rw] selector + # @return [::String] + # The fully qualified name of the method, for which the options below apply. + # This is used to find the method to apply the options. + # + # Example: + # + # publishing: + # method_settings: + # - selector: google.storage.control.v2.StorageControl.CreateFolder + # # method settings for CreateFolder... + # @!attribute [rw] long_running + # @return [::Google::Api::MethodSettings::LongRunning] + # Describes settings to use for long-running operations when generating + # API methods for RPCs. Complements RPCs that use the annotations in + # google/longrunning/operations.proto. + # + # Example of a YAML configuration:: + # + # publishing: + # method_settings: + # - selector: google.cloud.speech.v2.Speech.BatchRecognize + # long_running: + # initial_poll_delay: 60s # 1 minute + # poll_delay_multiplier: 1.5 + # max_poll_delay: 360s # 6 minutes + # total_poll_timeout: 54000s # 90 minutes + # @!attribute [rw] auto_populated_fields + # @return [::Array<::String>] + # List of top-level fields of the request message, that should be + # automatically populated by the client libraries based on their + # (google.api.field_info).format. Currently supported format: UUID4. + # + # Example of a YAML configuration: + # + # publishing: + # method_settings: + # - selector: google.example.v1.ExampleService.CreateExample + # auto_populated_fields: + # - request_id + class MethodSettings + include ::Google::Protobuf::MessageExts + extend ::Google::Protobuf::MessageExts::ClassMethods + + # Describes settings to use when generating API methods that use the + # long-running operation pattern. + # All default values below are from those used in the client library + # generators (e.g. 
+ # [Java](https://github.com/googleapis/gapic-generator-java/blob/04c2faa191a9b5a10b92392fe8482279c4404803/src/main/java/com/google/api/generator/gapic/composer/common/RetrySettingsComposer.java)). + # @!attribute [rw] initial_poll_delay + # @return [::Google::Protobuf::Duration] + # Initial delay after which the first poll request will be made. + # Default value: 5 seconds. + # @!attribute [rw] poll_delay_multiplier + # @return [::Float] + # Multiplier to gradually increase delay between subsequent polls until it + # reaches max_poll_delay. + # Default value: 1.5. + # @!attribute [rw] max_poll_delay + # @return [::Google::Protobuf::Duration] + # Maximum time between two subsequent poll requests. + # Default value: 45 seconds. + # @!attribute [rw] total_poll_timeout + # @return [::Google::Protobuf::Duration] + # Total polling timeout. + # Default value: 5 minutes. + class LongRunning + include ::Google::Protobuf::MessageExts + extend ::Google::Protobuf::MessageExts::ClassMethods + end + end + + # The organization for which the client libraries are being published. + # Affects the url where generated docs are published, etc. + module ClientLibraryOrganization + # Not useful. + CLIENT_LIBRARY_ORGANIZATION_UNSPECIFIED = 0 + + # Google Cloud Platform Org. + CLOUD = 1 + + # Ads (Advertising) Org. + ADS = 2 + + # Photos Org. + PHOTOS = 3 + + # Street View Org. + STREET_VIEW = 4 + + # Shopping Org. + SHOPPING = 5 + + # Geo Org. + GEO = 6 + + # Generative AI - https://developers.generativeai.google + GENERATIVE_AI = 7 + end + + # To where should client libraries be published? + module ClientLibraryDestination + # Client libraries will neither be generated nor published to package + # managers. + CLIENT_LIBRARY_DESTINATION_UNSPECIFIED = 0 + + # Generate the client library in a repo under github.com/googleapis, + # but don't publish it to package managers. + GITHUB = 10 + + # Publish the library to package managers like nuget.org and npmjs.com. + PACKAGE_MANAGER = 20 + end + end +end diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/api/field_behavior.rb b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/api/field_behavior.rb new file mode 100644 index 000000000000..b03587481349 --- /dev/null +++ b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/api/field_behavior.rb @@ -0,0 +1,85 @@ +# frozen_string_literal: true + +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Auto-generated by gapic-generator-ruby. DO NOT EDIT! + + +module Google + module Api + # An indicator of the behavior of a given field (for example, that a field + # is required in requests, or given as output but ignored as input). + # This **does not** change the behavior in protocol buffers itself; it only + # denotes the behavior and may affect how API tooling handles the field. + # + # Note: This enum **may** receive new values in the future. + module FieldBehavior + # Conventional default for enums. Do not use this. 
+ FIELD_BEHAVIOR_UNSPECIFIED = 0 + + # Specifically denotes a field as optional. + # While all fields in protocol buffers are optional, this may be specified + # for emphasis if appropriate. + OPTIONAL = 1 + + # Denotes a field as required. + # This indicates that the field **must** be provided as part of the request, + # and failure to do so will cause an error (usually `INVALID_ARGUMENT`). + REQUIRED = 2 + + # Denotes a field as output only. + # This indicates that the field is provided in responses, but including the + # field in a request does nothing (the server *must* ignore it and + # *must not* throw an error as a result of the field's presence). + OUTPUT_ONLY = 3 + + # Denotes a field as input only. + # This indicates that the field is provided in requests, and the + # corresponding field is not included in output. + INPUT_ONLY = 4 + + # Denotes a field as immutable. + # This indicates that the field may be set once in a request to create a + # resource, but may not be changed thereafter. + IMMUTABLE = 5 + + # Denotes that a (repeated) field is an unordered list. + # This indicates that the service may provide the elements of the list + # in any arbitrary order, rather than the order the user originally + # provided. Additionally, the list's order may or may not be stable. + UNORDERED_LIST = 6 + + # Denotes that this field returns a non-empty default value if not set. + # This indicates that if the user provides the empty value in a request, + # a non-empty value will be returned. The user will not be aware of what + # non-empty value to expect. + NON_EMPTY_DEFAULT = 7 + + # Denotes that the field in a resource (a message annotated with + # google.api.resource) is used in the resource name to uniquely identify the + # resource. For AIP-compliant APIs, this should only be applied to the + # `name` field on the resource. + # + # This behavior should not be applied to references to other resources within + # the message. + # + # The identifier field of resources often have different field behavior + # depending on the request it is embedded in (e.g. for Create methods name + # is optional and unused, while for Update methods it is required). Instead + # of method-specific annotations, only `IDENTIFIER` is required. + IDENTIFIER = 8 + end + end +end diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/api/launch_stage.rb b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/api/launch_stage.rb new file mode 100644 index 000000000000..38b4b61e6061 --- /dev/null +++ b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/api/launch_stage.rb @@ -0,0 +1,71 @@ +# frozen_string_literal: true + +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Auto-generated by gapic-generator-ruby. DO NOT EDIT! + + +module Google + module Api + # The launch stage as defined by [Google Cloud Platform + # Launch Stages](https://cloud.google.com/terms/launch-stages). 
+ module LaunchStage + # Do not use this default value. + LAUNCH_STAGE_UNSPECIFIED = 0 + + # The feature is not yet implemented. Users can not use it. + UNIMPLEMENTED = 6 + + # Prelaunch features are hidden from users and are only visible internally. + PRELAUNCH = 7 + + # Early Access features are limited to a closed group of testers. To use + # these features, you must sign up in advance and sign a Trusted Tester + # agreement (which includes confidentiality provisions). These features may + # be unstable, changed in backward-incompatible ways, and are not + # guaranteed to be released. + EARLY_ACCESS = 1 + + # Alpha is a limited availability test for releases before they are cleared + # for widespread use. By Alpha, all significant design issues are resolved + # and we are in the process of verifying functionality. Alpha customers + # need to apply for access, agree to applicable terms, and have their + # projects allowlisted. Alpha releases don't have to be feature complete, + # no SLAs are provided, and there are no technical support obligations, but + # they will be far enough along that customers can actually use them in + # test environments or for limited-use tests -- just like they would in + # normal production cases. + ALPHA = 2 + + # Beta is the point at which we are ready to open a release for any + # customer to use. There are no SLA or technical support obligations in a + # Beta release. Products will be complete from a feature perspective, but + # may have some open outstanding issues. Beta releases are suitable for + # limited production use cases. + BETA = 3 + + # GA features are open to all developers and are considered stable and + # fully qualified for production use. + GA = 4 + + # Deprecated features are scheduled to be shut down and removed. For more + # information, see the "Deprecation Policy" section of our [Terms of + # Service](https://cloud.google.com/terms/) + # and the [Google Cloud Platform Subject to the Deprecation + # Policy](https://cloud.google.com/terms/deprecation) documentation. + DEPRECATED = 5 + end + end +end diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/api/resource.rb b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/api/resource.rb new file mode 100644 index 000000000000..935946d52792 --- /dev/null +++ b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/api/resource.rb @@ -0,0 +1,227 @@ +# frozen_string_literal: true + +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Auto-generated by gapic-generator-ruby. DO NOT EDIT! + + +module Google + module Api + # A simple descriptor of a resource type. + # + # ResourceDescriptor annotates a resource message (either by means of a + # protobuf annotation or use in the service config), and associates the + # resource's schema, the resource type, and the pattern of the resource name. + # + # Example: + # + # message Topic { + # // Indicates this message defines a resource schema. 
+ # // Declares the resource type in the format of {service}/{kind}. + # // For Kubernetes resources, the format is {api group}/{kind}. + # option (google.api.resource) = { + # type: "pubsub.googleapis.com/Topic" + # pattern: "projects/{project}/topics/{topic}" + # }; + # } + # + # The ResourceDescriptor Yaml config will look like: + # + # resources: + # - type: "pubsub.googleapis.com/Topic" + # pattern: "projects/{project}/topics/{topic}" + # + # Sometimes, resources have multiple patterns, typically because they can + # live under multiple parents. + # + # Example: + # + # message LogEntry { + # option (google.api.resource) = { + # type: "logging.googleapis.com/LogEntry" + # pattern: "projects/{project}/logs/{log}" + # pattern: "folders/{folder}/logs/{log}" + # pattern: "organizations/{organization}/logs/{log}" + # pattern: "billingAccounts/{billing_account}/logs/{log}" + # }; + # } + # + # The ResourceDescriptor Yaml config will look like: + # + # resources: + # - type: 'logging.googleapis.com/LogEntry' + # pattern: "projects/{project}/logs/{log}" + # pattern: "folders/{folder}/logs/{log}" + # pattern: "organizations/{organization}/logs/{log}" + # pattern: "billingAccounts/{billing_account}/logs/{log}" + # @!attribute [rw] type + # @return [::String] + # The resource type. It must be in the format of + # \\{service_name}/\\{resource_type_kind}. The `resource_type_kind` must be + # singular and must not include version numbers. + # + # Example: `storage.googleapis.com/Bucket` + # + # The value of the resource_type_kind must follow the regular expression + # /[A-Za-z][a-zA-Z0-9]+/. It should start with an upper case character and + # should use PascalCase (UpperCamelCase). The maximum number of + # characters allowed for the `resource_type_kind` is 100. + # @!attribute [rw] pattern + # @return [::Array<::String>] + # Optional. The relative resource name pattern associated with this resource + # type. The DNS prefix of the full resource name shouldn't be specified here. + # + # The path pattern must follow the syntax, which aligns with HTTP binding + # syntax: + # + # Template = Segment { "/" Segment } ; + # Segment = LITERAL | Variable ; + # Variable = "{" LITERAL "}" ; + # + # Examples: + # + # - "projects/\\{project}/topics/\\{topic}" + # - "projects/\\{project}/knowledgeBases/\\{knowledge_base}" + # + # The components in braces correspond to the IDs for each resource in the + # hierarchy. It is expected that, if multiple patterns are provided, + # the same component name (e.g. "project") refers to IDs of the same + # type of resource. + # @!attribute [rw] name_field + # @return [::String] + # Optional. The field on the resource that designates the resource name + # field. If omitted, this is assumed to be "name". + # @!attribute [rw] history + # @return [::Google::Api::ResourceDescriptor::History] + # Optional. The historical or future-looking state of the resource pattern. + # + # Example: + # + # // The InspectTemplate message originally only supported resource + # // names with organization, and project was added later. 
+ # message InspectTemplate { + # option (google.api.resource) = { + # type: "dlp.googleapis.com/InspectTemplate" + # pattern: + # "organizations/{organization}/inspectTemplates/{inspect_template}" + # pattern: "projects/{project}/inspectTemplates/{inspect_template}" + # history: ORIGINALLY_SINGLE_PATTERN + # }; + # } + # @!attribute [rw] plural + # @return [::String] + # The plural name used in the resource name and permission names, such as + # 'projects' for the resource name of 'projects/\\{project}' and the permission + # name of 'cloudresourcemanager.googleapis.com/projects.get'. One exception + # to this is for Nested Collections that have stuttering names, as defined + # in [AIP-122](https://google.aip.dev/122#nested-collections), where the + # collection ID in the resource name pattern does not necessarily directly + # match the `plural` value. + # + # It is the same concept of the `plural` field in k8s CRD spec + # https://kubernetes.io/docs/tasks/access-kubernetes-api/custom-resources/custom-resource-definitions/ + # + # Note: The plural form is required even for singleton resources. See + # https://aip.dev/156 + # @!attribute [rw] singular + # @return [::String] + # The same concept of the `singular` field in k8s CRD spec + # https://kubernetes.io/docs/tasks/access-kubernetes-api/custom-resources/custom-resource-definitions/ + # Such as "project" for the `resourcemanager.googleapis.com/Project` type. + # @!attribute [rw] style + # @return [::Array<::Google::Api::ResourceDescriptor::Style>] + # Style flag(s) for this resource. + # These indicate that a resource is expected to conform to a given + # style. See the specific style flags for additional information. + class ResourceDescriptor + include ::Google::Protobuf::MessageExts + extend ::Google::Protobuf::MessageExts::ClassMethods + + # A description of the historical or future-looking state of the + # resource pattern. + module History + # The "unset" value. + HISTORY_UNSPECIFIED = 0 + + # The resource originally had one pattern and launched as such, and + # additional patterns were added later. + ORIGINALLY_SINGLE_PATTERN = 1 + + # The resource has one pattern, but the API owner expects to add more + # later. (This is the inverse of ORIGINALLY_SINGLE_PATTERN, and prevents + # that from being necessary once there are multiple patterns.) + FUTURE_MULTI_PATTERN = 2 + end + + # A flag representing a specific style that a resource claims to conform to. + module Style + # The unspecified value. Do not use. + STYLE_UNSPECIFIED = 0 + + # This resource is intended to be "declarative-friendly". + # + # Declarative-friendly resources must be more strictly consistent, and + # setting this to true communicates to tools that this resource should + # adhere to declarative-friendly expectations. + # + # Note: This is used by the API linter (linter.aip.dev) to enable + # additional checks. + DECLARATIVE_FRIENDLY = 1 + end + end + + # Defines a proto annotation that describes a string field that refers to + # an API resource. + # @!attribute [rw] type + # @return [::String] + # The resource type that the annotated field references. + # + # Example: + # + # message Subscription { + # string topic = 2 [(google.api.resource_reference) = { + # type: "pubsub.googleapis.com/Topic" + # }]; + # } + # + # Occasionally, a field may reference an arbitrary resource. In this case, + # APIs use the special value * in their resource reference. 
+ # + # Example: + # + # message GetIamPolicyRequest { + # string resource = 2 [(google.api.resource_reference) = { + # type: "*" + # }]; + # } + # @!attribute [rw] child_type + # @return [::String] + # The resource type of a child collection that the annotated field + # references. This is useful for annotating the `parent` field that + # doesn't have a fixed resource type. + # + # Example: + # + # message ListLogEntriesRequest { + # string parent = 1 [(google.api.resource_reference) = { + # child_type: "logging.googleapis.com/LogEntry" + # }; + # } + class ResourceReference + include ::Google::Protobuf::MessageExts + extend ::Google::Protobuf::MessageExts::ClassMethods + end + end +end diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/cloud/bigquery/datatransfer/v1/datatransfer.rb b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/cloud/bigquery/datatransfer/v1/datatransfer.rb new file mode 100644 index 000000000000..425e671dc477 --- /dev/null +++ b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/cloud/bigquery/datatransfer/v1/datatransfer.rb @@ -0,0 +1,722 @@ +# frozen_string_literal: true + +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Auto-generated by gapic-generator-ruby. DO NOT EDIT! + + +module Google + module Cloud + module Bigquery + module DataTransfer + module V1 + # A parameter used to define custom fields in a data source definition. + # @!attribute [rw] param_id + # @return [::String] + # Parameter identifier. + # @!attribute [rw] display_name + # @return [::String] + # Parameter display name in the user interface. + # @!attribute [rw] description + # @return [::String] + # Parameter description. + # @!attribute [rw] type + # @return [::Google::Cloud::Bigquery::DataTransfer::V1::DataSourceParameter::Type] + # Parameter type. + # @!attribute [rw] required + # @return [::Boolean] + # Is parameter required. + # @!attribute [rw] repeated + # @return [::Boolean] + # Deprecated. This field has no effect. + # @!attribute [rw] validation_regex + # @return [::String] + # Regular expression which can be used for parameter validation. + # @!attribute [rw] allowed_values + # @return [::Array<::String>] + # All possible values for the parameter. + # @!attribute [rw] min_value + # @return [::Google::Protobuf::DoubleValue] + # For integer and double values specifies minimum allowed value. + # @!attribute [rw] max_value + # @return [::Google::Protobuf::DoubleValue] + # For integer and double values specifies maximum allowed value. + # @!attribute [rw] fields + # @return [::Array<::Google::Cloud::Bigquery::DataTransfer::V1::DataSourceParameter>] + # Deprecated. This field has no effect. + # @!attribute [rw] validation_description + # @return [::String] + # Description of the requirements for this field, in case the user input does + # not fulfill the regex pattern or min/max values. 
+ # @!attribute [rw] validation_help_url + # @return [::String] + # URL to a help document to further explain the naming requirements. + # @!attribute [rw] immutable + # @return [::Boolean] + # Cannot be changed after initial creation. + # @!attribute [rw] recurse + # @return [::Boolean] + # Deprecated. This field has no effect. + # @!attribute [rw] deprecated + # @return [::Boolean] + # If true, it should not be used in new transfers, and it should not be + # visible to users. + class DataSourceParameter + include ::Google::Protobuf::MessageExts + extend ::Google::Protobuf::MessageExts::ClassMethods + + # Parameter type. + module Type + # Type unspecified. + TYPE_UNSPECIFIED = 0 + + # String parameter. + STRING = 1 + + # Integer parameter (64-bits). + # Will be serialized to json as string. + INTEGER = 2 + + # Double precision floating point parameter. + DOUBLE = 3 + + # Boolean parameter. + BOOLEAN = 4 + + # Deprecated. This field has no effect. + RECORD = 5 + + # Page ID for a Google+ Page. + PLUS_PAGE = 6 + + # List of strings parameter. + LIST = 7 + end + end + + # Defines the properties and custom parameters for a data source. + # @!attribute [r] name + # @return [::String] + # Output only. Data source resource name. + # @!attribute [rw] data_source_id + # @return [::String] + # Data source id. + # @!attribute [rw] display_name + # @return [::String] + # User friendly data source name. + # @!attribute [rw] description + # @return [::String] + # User friendly data source description string. + # @!attribute [rw] client_id + # @return [::String] + # Data source client id which should be used to receive refresh token. + # @!attribute [rw] scopes + # @return [::Array<::String>] + # Api auth scopes for which refresh token needs to be obtained. These are + # scopes needed by a data source to prepare data and ingest them into + # BigQuery, e.g., https://www.googleapis.com/auth/bigquery + # @!attribute [rw] transfer_type + # @deprecated This field is deprecated and may be removed in the next major version update. + # @return [::Google::Cloud::Bigquery::DataTransfer::V1::TransferType] + # Deprecated. This field has no effect. + # @!attribute [rw] supports_multiple_transfers + # @deprecated This field is deprecated and may be removed in the next major version update. + # @return [::Boolean] + # Deprecated. This field has no effect. + # @!attribute [rw] update_deadline_seconds + # @return [::Integer] + # The number of seconds to wait for an update from the data source + # before the Data Transfer Service marks the transfer as FAILED. + # @!attribute [rw] default_schedule + # @return [::String] + # Default data transfer schedule. + # Examples of valid schedules include: + # `1st,3rd monday of month 15:30`, + # `every wed,fri of jan,jun 13:15`, and + # `first sunday of quarter 00:00`. + # @!attribute [rw] supports_custom_schedule + # @return [::Boolean] + # Specifies whether the data source supports a user defined schedule, or + # operates on the default schedule. + # When set to `true`, user can override default schedule. + # @!attribute [rw] parameters + # @return [::Array<::Google::Cloud::Bigquery::DataTransfer::V1::DataSourceParameter>] + # Data source parameters. + # @!attribute [rw] help_url + # @return [::String] + # Url for the help document for this data source. + # @!attribute [rw] authorization_type + # @return [::Google::Cloud::Bigquery::DataTransfer::V1::DataSource::AuthorizationType] + # Indicates the type of authorization. 
+ # @!attribute [rw] data_refresh_type + # @return [::Google::Cloud::Bigquery::DataTransfer::V1::DataSource::DataRefreshType] + # Specifies whether the data source supports automatic data refresh for the + # past few days, and how it's supported. + # For some data sources, data might not be complete until a few days later, + # so it's useful to refresh data automatically. + # @!attribute [rw] default_data_refresh_window_days + # @return [::Integer] + # Default data refresh window in days. + # Only meaningful when `data_refresh_type` = `SLIDING_WINDOW`. + # @!attribute [rw] manual_runs_disabled + # @return [::Boolean] + # Disables backfilling and manual run scheduling + # for the data source. + # @!attribute [rw] minimum_schedule_interval + # @return [::Google::Protobuf::Duration] + # The minimum interval for the scheduler to schedule runs. + class DataSource + include ::Google::Protobuf::MessageExts + extend ::Google::Protobuf::MessageExts::ClassMethods + + # The type of authorization needed for this data source. + module AuthorizationType + # Type unspecified. + AUTHORIZATION_TYPE_UNSPECIFIED = 0 + + # Use OAuth 2 authorization codes that can be exchanged + # for a refresh token on the backend. + AUTHORIZATION_CODE = 1 + + # Return an authorization code for a given Google+ page that can then be + # exchanged for a refresh token on the backend. + GOOGLE_PLUS_AUTHORIZATION_CODE = 2 + + # Use First Party OAuth. + FIRST_PARTY_OAUTH = 3 + end + + # Represents how the data source supports data auto refresh. + module DataRefreshType + # The data source won't support data auto refresh, which is the default + # value. + DATA_REFRESH_TYPE_UNSPECIFIED = 0 + + # The data source supports data auto refresh, and runs will be scheduled + # for the past few days. Does not allow custom values to be set for each + # transfer config. + SLIDING_WINDOW = 1 + + # The data source supports data auto refresh, and runs will be scheduled + # for the past few days. Allows custom values to be set for each transfer + # config. + CUSTOM_SLIDING_WINDOW = 2 + end + end + + # A request to get data source info. + # @!attribute [rw] name + # @return [::String] + # Required. The field will contain name of the resource requested, for + # example: `projects/{project_id}/dataSources/{data_source_id}` or + # `projects/{project_id}/locations/{location_id}/dataSources/{data_source_id}` + class GetDataSourceRequest + include ::Google::Protobuf::MessageExts + extend ::Google::Protobuf::MessageExts::ClassMethods + end + + # Request to list supported data sources and their data transfer settings. + # @!attribute [rw] parent + # @return [::String] + # Required. The BigQuery project id for which data sources should be + # returned. Must be in the form: `projects/{project_id}` or + # `projects/{project_id}/locations/{location_id}` + # @!attribute [rw] page_token + # @return [::String] + # Pagination token, which can be used to request a specific page + # of `ListDataSourcesRequest` list results. For multiple-page + # results, `ListDataSourcesResponse` outputs + # a `next_page` token, which can be used as the + # `page_token` value to request the next page of list results. + # @!attribute [rw] page_size + # @return [::Integer] + # Page size. The default page size is the maximum value of 1000 results. + class ListDataSourcesRequest + include ::Google::Protobuf::MessageExts + extend ::Google::Protobuf::MessageExts::ClassMethods + end + + # Returns a list of supported data sources and their metadata.
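+          #
+          # A minimal usage sketch (illustrative only, not generated content; it
+          # assumes default credentials and uses a placeholder project id):
+          #
+          #     require "google/cloud/bigquery/data_transfer/v1"
+          #
+          #     client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new
+          #     request = ::Google::Cloud::Bigquery::DataTransfer::V1::ListDataSourcesRequest.new(
+          #       parent: "projects/my-project"
+          #     )
+          #     # The paged enumerable fetches follow-up pages via next_page_token.
+          #     client.list_data_sources(request).each do |data_source|
+          #       puts "#{data_source.data_source_id}: #{data_source.display_name}"
+          #     end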
+ # @!attribute [rw] data_sources + # @return [::Array<::Google::Cloud::Bigquery::DataTransfer::V1::DataSource>] + # List of supported data sources and their transfer settings. + # @!attribute [r] next_page_token + # @return [::String] + # Output only. The next-pagination token. For multiple-page list results, + # this token can be used as the + # `ListDataSourcesRequest.page_token` + # to request the next page of list results. + class ListDataSourcesResponse + include ::Google::Protobuf::MessageExts + extend ::Google::Protobuf::MessageExts::ClassMethods + end + + # A request to create a data transfer configuration. If new credentials are + # needed for this transfer configuration, authorization info must be provided. + # If authorization info is provided, the transfer configuration will be + # associated with the user id corresponding to the authorization info. + # Otherwise, the transfer configuration will be associated with the calling + # user. + # + # When using a cross project service account for creating a transfer config, + # you must enable cross project service account usage. For more information, + # see [Disable attachment of service accounts to resources in other + # projects](https://cloud.google.com/resource-manager/docs/organization-policy/restricting-service-accounts#disable_cross_project_service_accounts). + # @!attribute [rw] parent + # @return [::String] + # Required. The BigQuery project id where the transfer configuration should + # be created. Must be in the format + # projects/\\{project_id}/locations/\\{location_id} or projects/\\{project_id}. If the + # specified location and the location of the destination BigQuery dataset do + # not match, the request will fail. + # @!attribute [rw] transfer_config + # @return [::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig] + # Required. Data transfer configuration to create. + # @!attribute [rw] authorization_code + # @deprecated This field is deprecated and may be removed in the next major version update. + # @return [::String] + # Deprecated: Authorization code was required when + # `transferConfig.dataSourceId` is 'youtube_channel' but it is no longer used + # in any data sources. Use `version_info` instead. + # + # Optional OAuth2 authorization code to use with this transfer configuration. + # This is required only if `transferConfig.dataSourceId` is 'youtube_channel' + # and new credentials are needed, as indicated by `CheckValidCreds`. In order + # to obtain authorization_code, make a request to the following URL: + #
+          #     https://bigquery.cloud.google.com/datatransfer/oauthz/auth?redirect_uri=urn:ietf:wg:oauth:2.0:oob&response_type=authorization_code&client_id=client_id&scope=data_source_scopes
+          #     
+ # * The client_id is the OAuth client_id of the data source as + # returned by ListDataSources method. + # * data_source_scopes are the scopes returned by ListDataSources + # method. + # + # Note that this should not be set when `service_account_name` is used to + # create the transfer config. + # @!attribute [rw] version_info + # @return [::String] + # Optional version info. This parameter replaces `authorization_code` which + # is no longer used in any data sources. This is required only if + # `transferConfig.dataSourceId` is 'youtube_channel' *or* new credentials + # are needed, as indicated by `CheckValidCreds`. In order to obtain version + # info, make a request to the following URL: + #
+          #     https://bigquery.cloud.google.com/datatransfer/oauthz/auth?redirect_uri=urn:ietf:wg:oauth:2.0:oob&response_type=version_info&client_id=client_id&scope=data_source_scopes
+          #     
+ # * The client_id is the OAuth client_id of the data source as + # returned by ListDataSources method. + # * data_source_scopes are the scopes returned by ListDataSources + # method. + # + # Note that this should not be set when `service_account_name` is used to + # create the transfer config. + # @!attribute [rw] service_account_name + # @return [::String] + # Optional service account email. If this field is set, the transfer config + # will be created with this service account's credentials. It requires that + # the requesting user calling this API has permissions to act as this service + # account. + # + # Note that not all data sources support service account credentials when + # creating a transfer config. For the latest list of data sources, read about + # [using service + # accounts](https://cloud.google.com/bigquery-transfer/docs/use-service-accounts). + class CreateTransferConfigRequest + include ::Google::Protobuf::MessageExts + extend ::Google::Protobuf::MessageExts::ClassMethods + end + + # A request to update a transfer configuration. To update the user id of the + # transfer configuration, authorization info needs to be provided. + # + # When using a cross project service account for updating a transfer config, + # you must enable cross project service account usage. For more information, + # see [Disable attachment of service accounts to resources in other + # projects](https://cloud.google.com/resource-manager/docs/organization-policy/restricting-service-accounts#disable_cross_project_service_accounts). + # @!attribute [rw] transfer_config + # @return [::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig] + # Required. Data transfer configuration to create. + # @!attribute [rw] authorization_code + # @deprecated This field is deprecated and may be removed in the next major version update. + # @return [::String] + # Deprecated: Authorization code was required when + # `transferConfig.dataSourceId` is 'youtube_channel' but it is no longer used + # in any data sources. Use `version_info` instead. + # + # Optional OAuth2 authorization code to use with this transfer configuration. + # This is required only if `transferConfig.dataSourceId` is 'youtube_channel' + # and new credentials are needed, as indicated by `CheckValidCreds`. In order + # to obtain authorization_code, make a request to the following URL: + #
+          #     https://bigquery.cloud.google.com/datatransfer/oauthz/auth?redirect_uri=urn:ietf:wg:oauth:2.0:oob&response_type=authorization_code&client_id=client_id&scope=data_source_scopes
+          #     
+ # * The client_id is the OAuth client_id of the data source as + # returned by ListDataSources method. + # * data_source_scopes are the scopes returned by ListDataSources + # method. + # + # Note that this should not be set when `service_account_name` is used to + # update the transfer config. + # @!attribute [rw] update_mask + # @return [::Google::Protobuf::FieldMask] + # Required. Required list of fields to be updated in this request. + # @!attribute [rw] version_info + # @return [::String] + # Optional version info. This parameter replaces `authorization_code` which + # is no longer used in any data sources. This is required only if + # `transferConfig.dataSourceId` is 'youtube_channel' *or* new credentials + # are needed, as indicated by `CheckValidCreds`. In order to obtain version + # info, make a request to the following URL: + #
+          #     https://bigquery.cloud.google.com/datatransfer/oauthz/auth?redirect_uri=urn:ietf:wg:oauth:2.0:oob&response_type=version_info&client_id=client_id&scope=data_source_scopes
+          #     
+ # * The client_id is the OAuth client_id of the data source as + # returned by ListDataSources method. + # * data_source_scopes are the scopes returned by ListDataSources + # method. + # + # Note that this should not be set when `service_account_name` is used to + # update the transfer config. + # @!attribute [rw] service_account_name + # @return [::String] + # Optional service account email. If this field is set, the transfer config + # will be created with this service account's credentials. It requires that + # the requesting user calling this API has permissions to act as this service + # account. + # + # Note that not all data sources support service account credentials when + # creating a transfer config. For the latest list of data sources, read about + # [using service + # accounts](https://cloud.google.com/bigquery-transfer/docs/use-service-accounts). + class UpdateTransferConfigRequest + include ::Google::Protobuf::MessageExts + extend ::Google::Protobuf::MessageExts::ClassMethods + end + + # A request to get data transfer information. + # @!attribute [rw] name + # @return [::String] + # Required. The field will contain name of the resource requested, for + # example: `projects/{project_id}/transferConfigs/{config_id}` or + # `projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}` + class GetTransferConfigRequest + include ::Google::Protobuf::MessageExts + extend ::Google::Protobuf::MessageExts::ClassMethods + end + + # A request to delete data transfer information. All associated transfer runs + # and log messages will be deleted as well. + # @!attribute [rw] name + # @return [::String] + # Required. The field will contain name of the resource requested, for + # example: `projects/{project_id}/transferConfigs/{config_id}` or + # `projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}` + class DeleteTransferConfigRequest + include ::Google::Protobuf::MessageExts + extend ::Google::Protobuf::MessageExts::ClassMethods + end + + # A request to get data transfer run information. + # @!attribute [rw] name + # @return [::String] + # Required. The field will contain name of the resource requested, for + # example: `projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}` + # or + # `projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}/runs/{run_id}` + class GetTransferRunRequest + include ::Google::Protobuf::MessageExts + extend ::Google::Protobuf::MessageExts::ClassMethods + end + + # A request to delete data transfer run information. + # @!attribute [rw] name + # @return [::String] + # Required. The field will contain name of the resource requested, for + # example: `projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}` + # or + # `projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}/runs/{run_id}` + class DeleteTransferRunRequest + include ::Google::Protobuf::MessageExts + extend ::Google::Protobuf::MessageExts::ClassMethods + end + + # A request to list data transfers configured for a BigQuery project. + # @!attribute [rw] parent + # @return [::String] + # Required. The BigQuery project id for which transfer configs + # should be returned: `projects/{project_id}` or + # `projects/{project_id}/locations/{location_id}` + # @!attribute [rw] data_source_ids + # @return [::Array<::String>] + # When specified, only configurations of requested data sources are returned. 
+ # @!attribute [rw] page_token + # @return [::String] + # Pagination token, which can be used to request a specific page + # of `ListTransferConfigsRequest` list results. For multiple-page + # results, `ListTransferConfigsResponse` outputs + # a `next_page` token, which can be used as the + # `page_token` value to request the next page of list results. + # @!attribute [rw] page_size + # @return [::Integer] + # Page size. The default page size is the maximum value of 1000 results. + class ListTransferConfigsRequest + include ::Google::Protobuf::MessageExts + extend ::Google::Protobuf::MessageExts::ClassMethods + end + + # The returned list of pipelines in the project. + # @!attribute [r] transfer_configs + # @return [::Array<::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig>] + # Output only. The stored pipeline transfer configurations. + # @!attribute [r] next_page_token + # @return [::String] + # Output only. The next-pagination token. For multiple-page list results, + # this token can be used as the + # `ListTransferConfigsRequest.page_token` + # to request the next page of list results. + class ListTransferConfigsResponse + include ::Google::Protobuf::MessageExts + extend ::Google::Protobuf::MessageExts::ClassMethods + end + + # A request to list data transfer runs. + # @!attribute [rw] parent + # @return [::String] + # Required. Name of transfer configuration for which transfer runs should be + # retrieved. Format of transfer configuration resource name is: + # `projects/{project_id}/transferConfigs/{config_id}` or + # `projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}`. + # @!attribute [rw] states + # @return [::Array<::Google::Cloud::Bigquery::DataTransfer::V1::TransferState>] + # When specified, only transfer runs with requested states are returned. + # @!attribute [rw] page_token + # @return [::String] + # Pagination token, which can be used to request a specific page + # of `ListTransferRunsRequest` list results. For multiple-page + # results, `ListTransferRunsResponse` outputs + # a `next_page` token, which can be used as the + # `page_token` value to request the next page of list results. + # @!attribute [rw] page_size + # @return [::Integer] + # Page size. The default page size is the maximum value of 1000 results. + # @!attribute [rw] run_attempt + # @return [::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferRunsRequest::RunAttempt] + # Indicates how run attempts are to be pulled. + class ListTransferRunsRequest + include ::Google::Protobuf::MessageExts + extend ::Google::Protobuf::MessageExts::ClassMethods + + # Represents which runs should be pulled. + module RunAttempt + # All runs should be returned. + RUN_ATTEMPT_UNSPECIFIED = 0 + + # Only the latest run per day should be returned. + LATEST = 1 + end + end + + # The returned list of pipelines in the project. + # @!attribute [r] transfer_runs + # @return [::Array<::Google::Cloud::Bigquery::DataTransfer::V1::TransferRun>] + # Output only. The stored pipeline transfer runs. + # @!attribute [r] next_page_token + # @return [::String] + # Output only. The next-pagination token. For multiple-page list results, + # this token can be used as the + # `ListTransferRunsRequest.page_token` + # to request the next page of list results. + class ListTransferRunsResponse + include ::Google::Protobuf::MessageExts + extend ::Google::Protobuf::MessageExts::ClassMethods + end + + # A request to get user-facing log messages associated with a data transfer run.
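+          #
+          # An illustrative sketch (resource names are placeholders; the severity
+          # filter uses the `message_types` field described below):
+          #
+          #     require "google/cloud/bigquery/data_transfer/v1"
+          #
+          #     client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new
+          #     request = ::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferLogsRequest.new(
+          #       parent: "projects/my-project/transferConfigs/my-config/runs/my-run",
+          #       message_types: [:ERROR] # surface only error messages
+          #     )
+          #     client.list_transfer_logs(request).each do |message|
+          #       puts message.message_text
+          #     end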
+ # @!attribute [rw] parent + # @return [::String] + # Required. Transfer run name in the form: + # `projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}` or + # `projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}/runs/{run_id}` + # @!attribute [rw] page_token + # @return [::String] + # Pagination token, which can be used to request a specific page + # of `ListTransferLogsRequest` list results. For multiple-page + # results, `ListTransferLogsResponse` outputs + # a `next_page` token, which can be used as the + # `page_token` value to request the next page of list results. + # @!attribute [rw] page_size + # @return [::Integer] + # Page size. The default page size is the maximum value of 1000 results. + # @!attribute [rw] message_types + # @return [::Array<::Google::Cloud::Bigquery::DataTransfer::V1::TransferMessage::MessageSeverity>] + # Message types to return. If not populated, INFO, WARNING, and ERROR + # messages are returned. + class ListTransferLogsRequest + include ::Google::Protobuf::MessageExts + extend ::Google::Protobuf::MessageExts::ClassMethods + end + + # The returned list of transfer run messages. + # @!attribute [r] transfer_messages + # @return [::Array<::Google::Cloud::Bigquery::DataTransfer::V1::TransferMessage>] + # Output only. The stored pipeline transfer messages. + # @!attribute [r] next_page_token + # @return [::String] + # Output only. The next-pagination token. For multiple-page list results, + # this token can be used as the + # `ListTransferLogsRequest.page_token` + # to request the next page of list results. + class ListTransferLogsResponse + include ::Google::Protobuf::MessageExts + extend ::Google::Protobuf::MessageExts::ClassMethods + end + + # A request to determine whether the user has valid credentials. This method + # is used to limit the number of OAuth popups in the user interface. The + # user id is inferred from the API call context. + # If the data source has the Google+ authorization type, this method + # returns false, as it cannot be determined whether the credentials are + # already valid merely based on the user id. + # @!attribute [rw] name + # @return [::String] + # Required. The data source in the form: + # `projects/{project_id}/dataSources/{data_source_id}` or + # `projects/{project_id}/locations/{location_id}/dataSources/{data_source_id}`. + class CheckValidCredsRequest + include ::Google::Protobuf::MessageExts + extend ::Google::Protobuf::MessageExts::ClassMethods + end + + # A response indicating whether the credentials exist and are valid. + # @!attribute [rw] has_valid_creds + # @return [::Boolean] + # If set to `true`, the credentials exist and are valid. + class CheckValidCredsResponse + include ::Google::Protobuf::MessageExts + extend ::Google::Protobuf::MessageExts::ClassMethods + end + + # A request to schedule transfer runs for a time range. + # @!attribute [rw] parent + # @return [::String] + # Required. Transfer configuration name in the form: + # `projects/{project_id}/transferConfigs/{config_id}` or + # `projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}`. + # @!attribute [rw] start_time + # @return [::Google::Protobuf::Timestamp] + # Required. Start time of the range of transfer runs. For example, + # `"2017-05-25T00:00:00+00:00"`. + # @!attribute [rw] end_time + # @return [::Google::Protobuf::Timestamp] + # Required. End time of the range of transfer runs. For example, + # `"2017-05-30T00:00:00+00:00"`.
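+          #
+          # An illustrative scheduling sketch (the config name is a placeholder;
+          # the timestamps mirror the examples above):
+          #
+          #     require "google/cloud/bigquery/data_transfer/v1"
+          #
+          #     client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new
+          #     response = client.schedule_transfer_runs(
+          #       parent: "projects/my-project/transferConfigs/my-config",
+          #       start_time: ::Google::Protobuf::Timestamp.new(seconds: Time.utc(2017, 5, 25).to_i),
+          #       end_time: ::Google::Protobuf::Timestamp.new(seconds: Time.utc(2017, 5, 30).to_i)
+          #     )
+          #     response.runs.each { |run| puts run.name }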
+ class ScheduleTransferRunsRequest + include ::Google::Protobuf::MessageExts + extend ::Google::Protobuf::MessageExts::ClassMethods + end + + # A response to schedule transfer runs for a time range. + # @!attribute [rw] runs + # @return [::Array<::Google::Cloud::Bigquery::DataTransfer::V1::TransferRun>] + # The transfer runs that were scheduled. + class ScheduleTransferRunsResponse + include ::Google::Protobuf::MessageExts + extend ::Google::Protobuf::MessageExts::ClassMethods + end + + # A request to start manual transfer runs. + # @!attribute [rw] parent + # @return [::String] + # Required. Transfer configuration name in the form: + # `projects/{project_id}/transferConfigs/{config_id}` or + # `projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}`. + # @!attribute [rw] requested_time_range + # @return [::Google::Cloud::Bigquery::DataTransfer::V1::StartManualTransferRunsRequest::TimeRange] + # A time_range start and end timestamp for historical data files or reports + # that are scheduled to be transferred by the scheduled transfer run. + # requested_time_range must be a past time and cannot include future time + # values. + # @!attribute [rw] requested_run_time + # @return [::Google::Protobuf::Timestamp] + # A run_time timestamp for historical data files or reports + # that are scheduled to be transferred by the scheduled transfer run. + # requested_run_time must be a past time and cannot include future time + # values. + class StartManualTransferRunsRequest + include ::Google::Protobuf::MessageExts + extend ::Google::Protobuf::MessageExts::ClassMethods + + # A specification for a time range; this will request transfer runs with + # run_time between start_time (inclusive) and end_time (exclusive). + # @!attribute [rw] start_time + # @return [::Google::Protobuf::Timestamp] + # Start time of the range of transfer runs. For example, + # `"2017-05-25T00:00:00+00:00"`. The start_time must be strictly less than + # the end_time. Creates transfer runs where run_time is in the range + # between start_time (inclusive) and end_time (exclusive). + # @!attribute [rw] end_time + # @return [::Google::Protobuf::Timestamp] + # End time of the range of transfer runs. For example, + # `"2017-05-30T00:00:00+00:00"`. The end_time must not be in the future. + # Creates transfer runs where run_time is in the range between start_time + # (inclusive) and end_time (exclusive). + class TimeRange + include ::Google::Protobuf::MessageExts + extend ::Google::Protobuf::MessageExts::ClassMethods + end + end + + # A response to start manual transfer runs. + # @!attribute [rw] runs + # @return [::Array<::Google::Cloud::Bigquery::DataTransfer::V1::TransferRun>] + # The transfer runs that were created. + class StartManualTransferRunsResponse + include ::Google::Protobuf::MessageExts + extend ::Google::Protobuf::MessageExts::ClassMethods + end + + # A request to enroll a set of data sources so they are visible in the + # BigQuery UI's `Transfer` tab. + # @!attribute [rw] name + # @return [::String] + # Required. The name of the project resource in the form: + # `projects/{project_id}` + # @!attribute [rw] data_source_ids + # @return [::Array<::String>] + # Data sources that are enrolled. It is required to provide at least one + # data source id. + class EnrollDataSourcesRequest + include ::Google::Protobuf::MessageExts + extend ::Google::Protobuf::MessageExts::ClassMethods + end + + # A request to unenroll a set of data sources so they are no longer visible in + # the BigQuery UI's `Transfer` tab.
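+          #
+          # An illustrative sketch (the project and data source ids are
+          # placeholders; on success the call returns an empty response):
+          #
+          #     require "google/cloud/bigquery/data_transfer/v1"
+          #
+          #     client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new
+          #     client.unenroll_data_sources(
+          #       name: "projects/my-project",
+          #       data_source_ids: ["google_cloud_storage"]
+          #     )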
+ # @!attribute [rw] name + # @return [::String] + # Required. The name of the project resource in the form: + # `projects/{project_id}` + # @!attribute [rw] data_source_ids + # @return [::Array<::String>] + # Data sources that are unenrolled. It is required to provide at least one + # data source id. + class UnenrollDataSourcesRequest + include ::Google::Protobuf::MessageExts + extend ::Google::Protobuf::MessageExts::ClassMethods + end + end + end + end + end +end diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/cloud/bigquery/datatransfer/v1/transfer.rb b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/cloud/bigquery/datatransfer/v1/transfer.rb new file mode 100644 index 000000000000..fe536677e3a6 --- /dev/null +++ b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/cloud/bigquery/datatransfer/v1/transfer.rb @@ -0,0 +1,405 @@ +# frozen_string_literal: true + +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Auto-generated by gapic-generator-ruby. DO NOT EDIT! + + +module Google + module Cloud + module Bigquery + module DataTransfer + module V1 + # Represents preferences for sending email notifications for transfer run + # events. + # @!attribute [rw] enable_failure_email + # @return [::Boolean] + # If true, email notifications will be sent on transfer run failures. + class EmailPreferences + include ::Google::Protobuf::MessageExts + extend ::Google::Protobuf::MessageExts::ClassMethods + end + + # Options customizing the data transfer schedule. + # @!attribute [rw] disable_auto_scheduling + # @return [::Boolean] + # If true, automatic scheduling of data transfer runs for this configuration + # will be disabled. The runs can be started on ad-hoc basis using + # StartManualTransferRuns API. When automatic scheduling is disabled, the + # TransferConfig.schedule field will be ignored. + # @!attribute [rw] start_time + # @return [::Google::Protobuf::Timestamp] + # Specifies time to start scheduling transfer runs. The first run will be + # scheduled at or after the start time according to a recurrence pattern + # defined in the schedule string. The start time can be changed at any + # moment. The time when a data transfer can be triggered manually is not + # limited by this option. + # @!attribute [rw] end_time + # @return [::Google::Protobuf::Timestamp] + # Defines time to stop scheduling transfer runs. A transfer run cannot be + # scheduled at or after the end time. The end time can be changed at any + # moment. The time when a data transfer can be triggered manually is not + # limited by this option. + class ScheduleOptions + include ::Google::Protobuf::MessageExts + extend ::Google::Protobuf::MessageExts::ClassMethods + end + + # V2 options customizing different types of data transfer schedule. + # This field supports existing time-based and manual transfer schedule. Also + # supports Event-Driven transfer schedule. 
ScheduleOptionsV2 cannot be used + # together with ScheduleOptions/Schedule. + # @!attribute [rw] time_based_schedule + # @return [::Google::Cloud::Bigquery::DataTransfer::V1::TimeBasedSchedule] + # Time based transfer schedule options. This is the default schedule + # option. + # @!attribute [rw] manual_schedule + # @return [::Google::Cloud::Bigquery::DataTransfer::V1::ManualSchedule] + # Manual transfer schedule. If set, the transfer run will not be + # auto-scheduled by the system, unless the client invokes + # StartManualTransferRuns. This is equivalent to + # disable_auto_scheduling = true. + # @!attribute [rw] event_driven_schedule + # @return [::Google::Cloud::Bigquery::DataTransfer::V1::EventDrivenSchedule] + # Event driven transfer schedule options. If set, the transfer will be + # scheduled upon event arrival. + class ScheduleOptionsV2 + include ::Google::Protobuf::MessageExts + extend ::Google::Protobuf::MessageExts::ClassMethods + end + + # Options customizing the time based transfer schedule. + # Options are migrated from the original ScheduleOptions message. + # @!attribute [rw] schedule + # @return [::String] + # Data transfer schedule. + # If the data source does not support a custom schedule, this should be + # empty. If it is empty, the default value for the data source will be used. + # The specified times are in UTC. + # Examples of valid format: + # `1st,3rd monday of month 15:30`, + # `every wed,fri of jan,jun 13:15`, and + # `first sunday of quarter 00:00`. + # See more explanation about the format here: + # https://cloud.google.com/appengine/docs/flexible/python/scheduling-jobs-with-cron-yaml#the_schedule_format + # + # NOTE: The minimum interval time between recurring transfers depends on the + # data source; refer to the documentation for your data source. + # @!attribute [rw] start_time + # @return [::Google::Protobuf::Timestamp] + # Specifies time to start scheduling transfer runs. The first run will be + # scheduled at or after the start time according to a recurrence pattern + # defined in the schedule string. The start time can be changed at any + # moment. + # @!attribute [rw] end_time + # @return [::Google::Protobuf::Timestamp] + # Defines time to stop scheduling transfer runs. A transfer run cannot be + # scheduled at or after the end time. The end time can be changed at any + # moment. + class TimeBasedSchedule + include ::Google::Protobuf::MessageExts + extend ::Google::Protobuf::MessageExts::ClassMethods + end + + # Options customizing the manual transfer schedule. + class ManualSchedule + include ::Google::Protobuf::MessageExts + extend ::Google::Protobuf::MessageExts::ClassMethods + end + + # Options customizing the event-driven transfer schedule. + # @!attribute [rw] pubsub_subscription + # @return [::String] + # Pub/Sub subscription name used to receive events. + # Only the Google Cloud Storage data source supports this option. + # Format: projects/\\{project}/subscriptions/\\{subscription} + class EventDrivenSchedule + include ::Google::Protobuf::MessageExts + extend ::Google::Protobuf::MessageExts::ClassMethods + end + + # Information about a user. + # @!attribute [rw] email + # @return [::String] + # E-mail address of the user. + class UserInfo + include ::Google::Protobuf::MessageExts + extend ::Google::Protobuf::MessageExts::ClassMethods + end + + # Represents a data transfer configuration. A transfer configuration + # contains all metadata needed to perform a data transfer. For example, + # `destination_dataset_id` specifies where data should be stored.
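+          #
+          # An illustrative construction sketch for the `schedule_options_v2` field
+          # introduced in this revision (project, dataset, and schedule values are
+          # placeholders; `params` for the chosen data source are omitted):
+          #
+          #     require "google/cloud/bigquery/data_transfer/v1"
+          #
+          #     transfer_config = ::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig.new(
+          #       destination_dataset_id: "my_dataset",
+          #       display_name: "nightly load",
+          #       data_source_id: "google_cloud_storage",
+          #       schedule_options_v2: ::Google::Cloud::Bigquery::DataTransfer::V1::ScheduleOptionsV2.new(
+          #         time_based_schedule: ::Google::Cloud::Bigquery::DataTransfer::V1::TimeBasedSchedule.new(
+          #           schedule: "every 24 hours"
+          #         )
+          #       )
+          #     )
+          #
+          #     client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new
+          #     client.create_transfer_config parent: "projects/my-project/locations/us", transfer_config: transfer_config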
+          # Represents a data transfer configuration. A transfer configuration
+          # contains all metadata needed to perform a data transfer. For example,
+          # `destination_dataset_id` specifies where data should be stored.
+          # When a new transfer configuration is created, the specified
+          # `destination_dataset_id` is created when needed and shared with the
+          # appropriate data source service account.
+          # @!attribute [rw] name
+          #   @return [::String]
+          #     Identifier. The resource name of the transfer config.
+          #     Transfer config names have the form either
+          #     `projects/{project_id}/locations/{region}/transferConfigs/{config_id}` or
+          #     `projects/{project_id}/transferConfigs/{config_id}`,
+          #     where `config_id` is usually a UUID, even though it is not
+          #     guaranteed or required. The name is ignored when creating a transfer
+          #     config.
+          # @!attribute [rw] destination_dataset_id
+          #   @return [::String]
+          #     The BigQuery target dataset id.
+          # @!attribute [rw] display_name
+          #   @return [::String]
+          #     User-specified display name for the data transfer.
+          # @!attribute [rw] data_source_id
+          #   @return [::String]
+          #     Data source ID. This cannot be changed once the data transfer is created.
+          #     The full list of available data source IDs can be returned through an API
+          #     call:
+          #     https://cloud.google.com/bigquery-transfer/docs/reference/datatransfer/rest/v1/projects.locations.dataSources/list
+          # @!attribute [rw] params
+          #   @return [::Google::Protobuf::Struct]
+          #     Parameters specific to each data source. For more information see the
+          #     bq tab in the 'Setting up a data transfer' section for each data source.
+          #     For example, the parameters for Cloud Storage transfers are listed here:
+          #     https://cloud.google.com/bigquery-transfer/docs/cloud-storage-transfer#bq
+          # @!attribute [rw] schedule
+          #   @return [::String]
+          #     Data transfer schedule.
+          #     If the data source does not support a custom schedule, this should be
+          #     empty. If it is empty, the default value for the data source will be used.
+          #     The specified times are in UTC.
+          #     Examples of valid format:
+          #     `1st,3rd monday of month 15:30`,
+          #     `every wed,fri of jan,jun 13:15`, and
+          #     `first sunday of quarter 00:00`.
+          #     See more explanation about the format here:
+          #     https://cloud.google.com/appengine/docs/flexible/python/scheduling-jobs-with-cron-yaml#the_schedule_format
+          #
+          #     NOTE: The minimum interval time between recurring transfers depends on the
+          #     data source; refer to the documentation for your data source.
+          # @!attribute [rw] schedule_options
+          #   @return [::Google::Cloud::Bigquery::DataTransfer::V1::ScheduleOptions]
+          #     Options customizing the data transfer schedule.
+          # @!attribute [rw] schedule_options_v2
+          #   @return [::Google::Cloud::Bigquery::DataTransfer::V1::ScheduleOptionsV2]
+          #     Options customizing different types of data transfer schedule.
+          #     This field replaces the "schedule" and "schedule_options" fields.
+          #     ScheduleOptionsV2 cannot be used together with ScheduleOptions/Schedule.
+          # @!attribute [rw] data_refresh_window_days
+          #   @return [::Integer]
+          #     The number of days to look back to automatically refresh the data.
+          #     For example, if `data_refresh_window_days = 10`, then every day
+          #     BigQuery reingests data for [today-10, today-1], rather than ingesting data
+          #     for just [today-1].
+          #     Only valid if the data source supports the feature. Set the value to 0
+          #     to use the default value.
+          # @!attribute [rw] disabled
+          #   @return [::Boolean]
+          #     Whether this config is disabled. When set to true, no runs will be
+          #     scheduled for this transfer config.
+          # @!attribute [r] update_time
+          #   @return [::Google::Protobuf::Timestamp]
+          #     Output only. Data transfer modification time. Ignored by server on input.
+          # @!attribute [r] next_run_time
+          #   @return [::Google::Protobuf::Timestamp]
+          #     Output only. Next time when the data transfer will run.
+          # @!attribute [r] state
+          #   @return [::Google::Cloud::Bigquery::DataTransfer::V1::TransferState]
+          #     Output only. State of the most recently updated transfer run.
+          # @!attribute [rw] user_id
+          #   @return [::Integer]
+          #     Deprecated. Unique ID of the user on whose behalf the transfer is done.
+          # @!attribute [r] dataset_region
+          #   @return [::String]
+          #     Output only. Region in which the BigQuery dataset is located.
+          # @!attribute [rw] notification_pubsub_topic
+          #   @return [::String]
+          #     Pub/Sub topic where notifications will be sent after transfer runs
+          #     associated with this transfer config finish.
+          #
+          #     The format for specifying a pubsub topic is:
+          #     `projects/{project_id}/topics/{topic_id}`
+          # @!attribute [rw] email_preferences
+          #   @return [::Google::Cloud::Bigquery::DataTransfer::V1::EmailPreferences]
+          #     Email notifications will be sent according to these preferences
+          #     to the email address of the user who owns this transfer config.
+          # @!attribute [r] owner_info
+          #   @return [::Google::Cloud::Bigquery::DataTransfer::V1::UserInfo]
+          #     Output only. Information about the user whose credentials are used to
+          #     transfer data. Populated only for `transferConfigs.get` requests. In case
+          #     the user information is not available, this field will not be populated.
+          # @!attribute [rw] encryption_configuration
+          #   @return [::Google::Cloud::Bigquery::DataTransfer::V1::EncryptionConfiguration]
+          #     The encryption configuration part. Currently, it is only used for the
+          #     optional KMS key name. The BigQuery service account of your project must be
+          #     granted permissions to use the key. Read methods will return the key name
+          #     applied in effect. Write methods will apply the key if it is present, or
+          #     otherwise try to apply project default keys if it is absent.
+          # @!attribute [r] error
+          #   @return [::Google::Rpc::Status]
+          #     Output only. Error code with detailed information about the reason for the
+          #     latest config failure.
+          class TransferConfig
+            include ::Google::Protobuf::MessageExts
+            extend ::Google::Protobuf::MessageExts::ClassMethods
+          end
+
+          # Represents the encryption configuration for a transfer.
+          # @!attribute [rw] kms_key_name
+          #   @return [::Google::Protobuf::StringValue]
+          #     The name of the KMS key used for encrypting BigQuery data.
+          class EncryptionConfiguration
+            include ::Google::Protobuf::MessageExts
+            extend ::Google::Protobuf::MessageExts::ClassMethods
+          end
+
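+          # A hedged sketch of inspecting the `error` field after fetching a
+          # transfer config (the config name is a hypothetical placeholder):
+          #
+          #     client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new
+          #     config = client.get_transfer_config name: "projects/my-project/transferConfigs/my-config"
+          #     if config.error && !config.error.code.zero?
+          #       warn "Latest config failure: #{config.error.message}"
+          #     end
+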
+          # Represents a data transfer run.
+          # @!attribute [rw] name
+          #   @return [::String]
+          #     Identifier. The resource name of the transfer run.
+          #     Transfer run names have the form
+          #     `projects/{project_id}/locations/{location}/transferConfigs/{config_id}/runs/{run_id}`.
+          #     The name is ignored when creating a transfer run.
+          # @!attribute [rw] schedule_time
+          #   @return [::Google::Protobuf::Timestamp]
+          #     Minimum time after which a transfer run can be started.
+          # @!attribute [rw] run_time
+          #   @return [::Google::Protobuf::Timestamp]
+          #     For batch transfer runs, specifies the date and time at which the data
+          #     should be ingested.
+          # @!attribute [rw] error_status
+          #   @return [::Google::Rpc::Status]
+          #     Status of the transfer run.
+          # @!attribute [r] start_time
+          #   @return [::Google::Protobuf::Timestamp]
+          #     Output only. Time when the transfer run was started.
+          #     Parameter ignored by server for input requests.
+          # @!attribute [r] end_time
+          #   @return [::Google::Protobuf::Timestamp]
+          #     Output only. Time when the transfer run ended.
+          #     Parameter ignored by server for input requests.
+          # @!attribute [r] update_time
+          #   @return [::Google::Protobuf::Timestamp]
+          #     Output only. Last time the data transfer run state was updated.
+          # @!attribute [r] params
+          #   @return [::Google::Protobuf::Struct]
+          #     Output only. Parameters specific to each data source. For more information
+          #     see the bq tab in the 'Setting up a data transfer' section for each data
+          #     source. For example, the parameters for Cloud Storage transfers are listed
+          #     here:
+          #     https://cloud.google.com/bigquery-transfer/docs/cloud-storage-transfer#bq
+          # @!attribute [r] destination_dataset_id
+          #   @return [::String]
+          #     Output only. The BigQuery target dataset id.
+          # @!attribute [r] data_source_id
+          #   @return [::String]
+          #     Output only. Data source id.
+          # @!attribute [rw] state
+          #   @return [::Google::Cloud::Bigquery::DataTransfer::V1::TransferState]
+          #     Data transfer run state. Ignored for input requests.
+          # @!attribute [rw] user_id
+          #   @return [::Integer]
+          #     Deprecated. Unique ID of the user on whose behalf the transfer is done.
+          # @!attribute [r] schedule
+          #   @return [::String]
+          #     Output only. Describes the schedule of this transfer run if it was
+          #     created as part of a regular schedule. For batch transfer runs that are
+          #     scheduled manually, this is empty.
+          #     NOTE: the system might choose to delay the schedule depending on the
+          #     current load, so `schedule_time` doesn't always match this.
+          # @!attribute [r] notification_pubsub_topic
+          #   @return [::String]
+          #     Output only. Pub/Sub topic where a notification will be sent after this
+          #     transfer run finishes.
+          #
+          #     The format for specifying a pubsub topic is:
+          #     `projects/{project_id}/topics/{topic_id}`
+          # @!attribute [r] email_preferences
+          #   @return [::Google::Cloud::Bigquery::DataTransfer::V1::EmailPreferences]
+          #     Output only. Email notifications will be sent according to these
+          #     preferences to the email address of the user who owns the transfer config
+          #     this run was derived from.
+          class TransferRun
+            include ::Google::Protobuf::MessageExts
+            extend ::Google::Protobuf::MessageExts::ClassMethods
+          end
+
+          # Represents a user-facing message for a particular data transfer run.
+          # @!attribute [rw] message_time
+          #   @return [::Google::Protobuf::Timestamp]
+          #     Time when message was logged.
+          # @!attribute [rw] severity
+          #   @return [::Google::Cloud::Bigquery::DataTransfer::V1::TransferMessage::MessageSeverity]
+          #     Message severity.
+          # @!attribute [rw] message_text
+          #   @return [::String]
+          #     Message text.
+          class TransferMessage
+            include ::Google::Protobuf::MessageExts
+            extend ::Google::Protobuf::MessageExts::ClassMethods
+
+            # Represents data transfer user-facing message severity.
+            module MessageSeverity
+              # No severity specified.
+              MESSAGE_SEVERITY_UNSPECIFIED = 0
+
+              # Informational message.
+              INFO = 1
+
+              # Warning message.
+              WARNING = 2
+
+              # Error message.
+              ERROR = 3
+            end
+          end
+
+          # DEPRECATED. Represents data transfer type.
+          # @deprecated This enum is deprecated and may be removed in the next major version update.
+          module TransferType
+            # Invalid or Unknown transfer type placeholder.
+            TRANSFER_TYPE_UNSPECIFIED = 0
+
+            # Batch data transfer.
+            BATCH = 1
+
+            # Streaming data transfer. Streaming data source currently doesn't
+            # support multiple transfer configs per project.
+            STREAMING = 2
+          end
+
+          # Represents data transfer run state.
+          module TransferState
+            # State placeholder (0).
+ TRANSFER_STATE_UNSPECIFIED = 0 + + # Data transfer is scheduled and is waiting to be picked up by + # data transfer backend (2). + PENDING = 2 + + # Data transfer is in progress (3). + RUNNING = 3 + + # Data transfer completed successfully (4). + SUCCEEDED = 4 + + # Data transfer failed (5). + FAILED = 5 + + # Data transfer is cancelled (6). + CANCELLED = 6 + end + end + end + end + end +end diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/protobuf/any.rb b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/protobuf/any.rb new file mode 100644 index 000000000000..fb4d6862eac9 --- /dev/null +++ b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/protobuf/any.rb @@ -0,0 +1,145 @@ +# frozen_string_literal: true + +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Auto-generated by gapic-generator-ruby. DO NOT EDIT! + + +module Google + module Protobuf + # `Any` contains an arbitrary serialized protocol buffer message along with a + # URL that describes the type of the serialized message. + # + # Protobuf library provides support to pack/unpack Any values in the form + # of utility functions or additional generated methods of the Any type. + # + # Example 1: Pack and unpack a message in C++. + # + # Foo foo = ...; + # Any any; + # any.PackFrom(foo); + # ... + # if (any.UnpackTo(&foo)) { + # ... + # } + # + # Example 2: Pack and unpack a message in Java. + # + # Foo foo = ...; + # Any any = Any.pack(foo); + # ... + # if (any.is(Foo.class)) { + # foo = any.unpack(Foo.class); + # } + # // or ... + # if (any.isSameTypeAs(Foo.getDefaultInstance())) { + # foo = any.unpack(Foo.getDefaultInstance()); + # } + # + # Example 3: Pack and unpack a message in Python. + # + # foo = Foo(...) + # any = Any() + # any.Pack(foo) + # ... + # if any.Is(Foo.DESCRIPTOR): + # any.Unpack(foo) + # ... + # + # Example 4: Pack and unpack a message in Go + # + # foo := &pb.Foo{...} + # any, err := anypb.New(foo) + # if err != nil { + # ... + # } + # ... + # foo := &pb.Foo{} + # if err := any.UnmarshalTo(foo); err != nil { + # ... + # } + # + # The pack methods provided by protobuf library will by default use + # 'type.googleapis.com/full.type.name' as the type URL and the unpack + # methods only use the fully qualified type name after the last '/' + # in the type URL, for example "foo.bar.com/x/y.z" will yield type + # name "y.z". + # + # JSON + # ==== + # The JSON representation of an `Any` value uses the regular + # representation of the deserialized, embedded message, with an + # additional field `@type` which contains the type URL. 
Example:
+    #
+    #     package google.profile;
+    #     message Person {
+    #       string first_name = 1;
+    #       string last_name = 2;
+    #     }
+    #
+    #     {
+    #       "@type": "type.googleapis.com/google.profile.Person",
+    #       "firstName": <string>,
+    #       "lastName": <string>
+    #     }
+    #
+    # If the embedded message type is well-known and has a custom JSON
+    # representation, that representation will be embedded adding a field
+    # `value` which holds the custom JSON in addition to the `@type`
+    # field. Example (for message [google.protobuf.Duration][]):
+    #
+    #     {
+    #       "@type": "type.googleapis.com/google.protobuf.Duration",
+    #       "value": "1.212s"
+    #     }
+    # @!attribute [rw] type_url
+    #   @return [::String]
+    #     A URL/resource name that uniquely identifies the type of the serialized
+    #     protocol buffer message. This string must contain at least
+    #     one "/" character. The last segment of the URL's path must represent
+    #     the fully qualified name of the type (as in
+    #     `path/google.protobuf.Duration`). The name should be in a canonical form
+    #     (e.g., leading "." is not accepted).
+    #
+    #     In practice, teams usually precompile into the binary all types that they
+    #     expect it to use in the context of Any. However, for URLs which use the
+    #     scheme `http`, `https`, or no scheme, one can optionally set up a type
+    #     server that maps type URLs to message definitions as follows:
+    #
+    #     * If no scheme is provided, `https` is assumed.
+    #     * An HTTP GET on the URL must yield a [google.protobuf.Type][]
+    #       value in binary format, or produce an error.
+    #     * Applications are allowed to cache lookup results based on the
+    #       URL, or have them precompiled into a binary to avoid any
+    #       lookup. Therefore, binary compatibility needs to be preserved
+    #       on changes to types. (Use versioned type names to manage
+    #       breaking changes.)
+    #
+    #     Note: this functionality is not currently available in the official
+    #     protobuf release, and it is not used for type URLs beginning with
+    #     type.googleapis.com. As of May 2023, there are no widely used type server
+    #     implementations and no plans to implement one.
+    #
+    #     Schemes other than `http`, `https` (or the empty scheme) might be
+    #     used with implementation specific semantics.
+    # @!attribute [rw] value
+    #   @return [::String]
+    #     Must be a valid serialized protocol buffer of the above specified type.
+    class Any
+      include ::Google::Protobuf::MessageExts
+      extend ::Google::Protobuf::MessageExts::ClassMethods
+    end
+  end
+end
diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/protobuf/duration.rb b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/protobuf/duration.rb
new file mode 100644
index 000000000000..b5731a824060
--- /dev/null
+++ b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/protobuf/duration.rb
@@ -0,0 +1,98 @@
+# frozen_string_literal: true

+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.

+# Auto-generated by gapic-generator-ruby. DO NOT EDIT!
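+# A hedged illustration of the JSON mapping described below, assuming the
+# `google-protobuf` runtime gem is available:
+#
+#     require "google/protobuf/duration_pb"
+#
+#     # 3 seconds and 1 nanosecond; serializes to "3.000000001s" in JSON.
+#     duration = Google::Protobuf::Duration.new(seconds: 3, nanos: 1)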
+ + +module Google + module Protobuf + # A Duration represents a signed, fixed-length span of time represented + # as a count of seconds and fractions of seconds at nanosecond + # resolution. It is independent of any calendar and concepts like "day" + # or "month". It is related to Timestamp in that the difference between + # two Timestamp values is a Duration and it can be added or subtracted + # from a Timestamp. Range is approximately +-10,000 years. + # + # # Examples + # + # Example 1: Compute Duration from two Timestamps in pseudo code. + # + # Timestamp start = ...; + # Timestamp end = ...; + # Duration duration = ...; + # + # duration.seconds = end.seconds - start.seconds; + # duration.nanos = end.nanos - start.nanos; + # + # if (duration.seconds < 0 && duration.nanos > 0) { + # duration.seconds += 1; + # duration.nanos -= 1000000000; + # } else if (duration.seconds > 0 && duration.nanos < 0) { + # duration.seconds -= 1; + # duration.nanos += 1000000000; + # } + # + # Example 2: Compute Timestamp from Timestamp + Duration in pseudo code. + # + # Timestamp start = ...; + # Duration duration = ...; + # Timestamp end = ...; + # + # end.seconds = start.seconds + duration.seconds; + # end.nanos = start.nanos + duration.nanos; + # + # if (end.nanos < 0) { + # end.seconds -= 1; + # end.nanos += 1000000000; + # } else if (end.nanos >= 1000000000) { + # end.seconds += 1; + # end.nanos -= 1000000000; + # } + # + # Example 3: Compute Duration from datetime.timedelta in Python. + # + # td = datetime.timedelta(days=3, minutes=10) + # duration = Duration() + # duration.FromTimedelta(td) + # + # # JSON Mapping + # + # In JSON format, the Duration type is encoded as a string rather than an + # object, where the string ends in the suffix "s" (indicating seconds) and + # is preceded by the number of seconds, with nanoseconds expressed as + # fractional seconds. For example, 3 seconds with 0 nanoseconds should be + # encoded in JSON format as "3s", while 3 seconds and 1 nanosecond should + # be expressed in JSON format as "3.000000001s", and 3 seconds and 1 + # microsecond should be expressed in JSON format as "3.000001s". + # @!attribute [rw] seconds + # @return [::Integer] + # Signed seconds of the span of time. Must be from -315,576,000,000 + # to +315,576,000,000 inclusive. Note: these bounds are computed from: + # 60 sec/min * 60 min/hr * 24 hr/day * 365.25 days/year * 10000 years + # @!attribute [rw] nanos + # @return [::Integer] + # Signed fractions of a second at nanosecond resolution of the span + # of time. Durations less than one second are represented with a 0 + # `seconds` field and a positive or negative `nanos` field. For durations + # of one second or more, a non-zero value for the `nanos` field must be + # of the same sign as the `seconds` field. Must be from -999,999,999 + # to +999,999,999 inclusive. 
+ class Duration + include ::Google::Protobuf::MessageExts + extend ::Google::Protobuf::MessageExts::ClassMethods + end + end +end diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/protobuf/empty.rb b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/protobuf/empty.rb new file mode 100644 index 000000000000..8c6b19d52e3d --- /dev/null +++ b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/protobuf/empty.rb @@ -0,0 +1,34 @@ +# frozen_string_literal: true + +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Auto-generated by gapic-generator-ruby. DO NOT EDIT! + + +module Google + module Protobuf + # A generic empty message that you can re-use to avoid defining duplicated + # empty messages in your APIs. A typical example is to use it as the request + # or the response type of an API method. For instance: + # + # service Foo { + # rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty); + # } + class Empty + include ::Google::Protobuf::MessageExts + extend ::Google::Protobuf::MessageExts::ClassMethods + end + end +end diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/protobuf/field_mask.rb b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/protobuf/field_mask.rb new file mode 100644 index 000000000000..8e7abcf8f052 --- /dev/null +++ b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/protobuf/field_mask.rb @@ -0,0 +1,229 @@ +# frozen_string_literal: true + +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Auto-generated by gapic-generator-ruby. DO NOT EDIT! + + +module Google + module Protobuf + # `FieldMask` represents a set of symbolic field paths, for example: + # + # paths: "f.a" + # paths: "f.b.d" + # + # Here `f` represents a field in some root message, `a` and `b` + # fields in the message found in `f`, and `d` a field found in the + # message in `f.b`. + # + # Field masks are used to specify a subset of fields that should be + # returned by a get operation or modified by an update operation. + # Field masks also have a custom JSON encoding (see below). + # + # # Field Masks in Projections + # + # When used in the context of a projection, a response message or + # sub-message is filtered by the API to only contain those fields as + # specified in the mask. 
For example, if the mask in the previous + # example is applied to a response message as follows: + # + # f { + # a : 22 + # b { + # d : 1 + # x : 2 + # } + # y : 13 + # } + # z: 8 + # + # The result will not contain specific values for fields x,y and z + # (their value will be set to the default, and omitted in proto text + # output): + # + # + # f { + # a : 22 + # b { + # d : 1 + # } + # } + # + # A repeated field is not allowed except at the last position of a + # paths string. + # + # If a FieldMask object is not present in a get operation, the + # operation applies to all fields (as if a FieldMask of all fields + # had been specified). + # + # Note that a field mask does not necessarily apply to the + # top-level response message. In case of a REST get operation, the + # field mask applies directly to the response, but in case of a REST + # list operation, the mask instead applies to each individual message + # in the returned resource list. In case of a REST custom method, + # other definitions may be used. Where the mask applies will be + # clearly documented together with its declaration in the API. In + # any case, the effect on the returned resource/resources is required + # behavior for APIs. + # + # # Field Masks in Update Operations + # + # A field mask in update operations specifies which fields of the + # targeted resource are going to be updated. The API is required + # to only change the values of the fields as specified in the mask + # and leave the others untouched. If a resource is passed in to + # describe the updated values, the API ignores the values of all + # fields not covered by the mask. + # + # If a repeated field is specified for an update operation, new values will + # be appended to the existing repeated field in the target resource. Note that + # a repeated field is only allowed in the last position of a `paths` string. + # + # If a sub-message is specified in the last position of the field mask for an + # update operation, then new value will be merged into the existing sub-message + # in the target resource. + # + # For example, given the target message: + # + # f { + # b { + # d: 1 + # x: 2 + # } + # c: [1] + # } + # + # And an update message: + # + # f { + # b { + # d: 10 + # } + # c: [2] + # } + # + # then if the field mask is: + # + # paths: ["f.b", "f.c"] + # + # then the result will be: + # + # f { + # b { + # d: 10 + # x: 2 + # } + # c: [1, 2] + # } + # + # An implementation may provide options to override this default behavior for + # repeated and message fields. + # + # In order to reset a field's value to the default, the field must + # be in the mask and set to the default value in the provided resource. + # Hence, in order to reset all fields of a resource, provide a default + # instance of the resource and set all fields in the mask, or do + # not provide a mask as described below. + # + # If a field mask is not present on update, the operation applies to + # all fields (as if a field mask of all fields has been specified). + # Note that in the presence of schema evolution, this may mean that + # fields the client does not know and has therefore not filled into + # the request will be reset to their default. If this is unwanted + # behavior, a specific service may require a client to always specify + # a field mask, producing an error if not. + # + # As with get operations, the location of the resource which + # describes the updated values in the request message depends on the + # operation kind. 
In any case, the effect of the field mask is + # required to be honored by the API. + # + # ## Considerations for HTTP REST + # + # The HTTP kind of an update operation which uses a field mask must + # be set to PATCH instead of PUT in order to satisfy HTTP semantics + # (PUT must only be used for full updates). + # + # # JSON Encoding of Field Masks + # + # In JSON, a field mask is encoded as a single string where paths are + # separated by a comma. Fields name in each path are converted + # to/from lower-camel naming conventions. + # + # As an example, consider the following message declarations: + # + # message Profile { + # User user = 1; + # Photo photo = 2; + # } + # message User { + # string display_name = 1; + # string address = 2; + # } + # + # In proto a field mask for `Profile` may look as such: + # + # mask { + # paths: "user.display_name" + # paths: "photo" + # } + # + # In JSON, the same mask is represented as below: + # + # { + # mask: "user.displayName,photo" + # } + # + # # Field Masks and Oneof Fields + # + # Field masks treat fields in oneofs just as regular fields. Consider the + # following message: + # + # message SampleMessage { + # oneof test_oneof { + # string name = 4; + # SubMessage sub_message = 9; + # } + # } + # + # The field mask can be: + # + # mask { + # paths: "name" + # } + # + # Or: + # + # mask { + # paths: "sub_message" + # } + # + # Note that oneof type names ("test_oneof" in this case) cannot be used in + # paths. + # + # ## Field Mask Verification + # + # The implementation of any API method which has a FieldMask type field in the + # request should verify the included field paths, and return an + # `INVALID_ARGUMENT` error if any path is unmappable. + # @!attribute [rw] paths + # @return [::Array<::String>] + # The set of field mask paths. + class FieldMask + include ::Google::Protobuf::MessageExts + extend ::Google::Protobuf::MessageExts::ClassMethods + end + end +end diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/protobuf/struct.rb b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/protobuf/struct.rb new file mode 100644 index 000000000000..9e96368be9d9 --- /dev/null +++ b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/protobuf/struct.rb @@ -0,0 +1,96 @@ +# frozen_string_literal: true + +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Auto-generated by gapic-generator-ruby. DO NOT EDIT! + + +module Google + module Protobuf + # `Struct` represents a structured data value, consisting of fields + # which map to dynamically typed values. In some languages, `Struct` + # might be supported by a native representation. For example, in + # scripting languages like JS a struct is represented as an + # object. The details of that representation are described together + # with the proto support for the language. + # + # The JSON representation for `Struct` is JSON object. 
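+    #
+    # As a hedged sketch, the protobuf runtime's well-known-type helpers
+    # (assumed available via `google/protobuf/well_known_types`) convert
+    # between a Struct and a plain Ruby hash; the parameter names below are
+    # illustrative only:
+    #
+    #     require "google/protobuf/well_known_types"
+    #
+    #     struct = Google::Protobuf::Struct.from_hash(
+    #       "data_path_template" => "gs://my-bucket/*.csv",
+    #       "skip_leading_rows"  => 1
+    #     )
+    #     struct.to_h # => plain Ruby hash again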
+ # @!attribute [rw] fields + # @return [::Google::Protobuf::Map{::String => ::Google::Protobuf::Value}] + # Unordered map of dynamically typed values. + class Struct + include ::Google::Protobuf::MessageExts + extend ::Google::Protobuf::MessageExts::ClassMethods + + # @!attribute [rw] key + # @return [::String] + # @!attribute [rw] value + # @return [::Google::Protobuf::Value] + class FieldsEntry + include ::Google::Protobuf::MessageExts + extend ::Google::Protobuf::MessageExts::ClassMethods + end + end + + # `Value` represents a dynamically typed value which can be either + # null, a number, a string, a boolean, a recursive struct value, or a + # list of values. A producer of value is expected to set one of these + # variants. Absence of any variant indicates an error. + # + # The JSON representation for `Value` is JSON value. + # @!attribute [rw] null_value + # @return [::Google::Protobuf::NullValue] + # Represents a null value. + # @!attribute [rw] number_value + # @return [::Float] + # Represents a double value. + # @!attribute [rw] string_value + # @return [::String] + # Represents a string value. + # @!attribute [rw] bool_value + # @return [::Boolean] + # Represents a boolean value. + # @!attribute [rw] struct_value + # @return [::Google::Protobuf::Struct] + # Represents a structured value. + # @!attribute [rw] list_value + # @return [::Google::Protobuf::ListValue] + # Represents a repeated `Value`. + class Value + include ::Google::Protobuf::MessageExts + extend ::Google::Protobuf::MessageExts::ClassMethods + end + + # `ListValue` is a wrapper around a repeated field of values. + # + # The JSON representation for `ListValue` is JSON array. + # @!attribute [rw] values + # @return [::Array<::Google::Protobuf::Value>] + # Repeated field of dynamically typed values. + class ListValue + include ::Google::Protobuf::MessageExts + extend ::Google::Protobuf::MessageExts::ClassMethods + end + + # `NullValue` is a singleton enumeration to represent the null value for the + # `Value` type union. + # + # The JSON representation for `NullValue` is JSON `null`. + module NullValue + # Null value. + NULL_VALUE = 0 + end + end +end diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/protobuf/timestamp.rb b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/protobuf/timestamp.rb new file mode 100644 index 000000000000..4ac9c4801a3f --- /dev/null +++ b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/protobuf/timestamp.rb @@ -0,0 +1,127 @@ +# frozen_string_literal: true + +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Auto-generated by gapic-generator-ruby. DO NOT EDIT! + + +module Google + module Protobuf + # A Timestamp represents a point in time independent of any time zone or local + # calendar, encoded as a count of seconds and fractions of seconds at + # nanosecond resolution. 
The count is relative to an epoch at UTC midnight on + # January 1, 1970, in the proleptic Gregorian calendar which extends the + # Gregorian calendar backwards to year one. + # + # All minutes are 60 seconds long. Leap seconds are "smeared" so that no leap + # second table is needed for interpretation, using a [24-hour linear + # smear](https://developers.google.com/time/smear). + # + # The range is from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z. By + # restricting to that range, we ensure that we can convert to and from [RFC + # 3339](https://www.ietf.org/rfc/rfc3339.txt) date strings. + # + # # Examples + # + # Example 1: Compute Timestamp from POSIX `time()`. + # + # Timestamp timestamp; + # timestamp.set_seconds(time(NULL)); + # timestamp.set_nanos(0); + # + # Example 2: Compute Timestamp from POSIX `gettimeofday()`. + # + # struct timeval tv; + # gettimeofday(&tv, NULL); + # + # Timestamp timestamp; + # timestamp.set_seconds(tv.tv_sec); + # timestamp.set_nanos(tv.tv_usec * 1000); + # + # Example 3: Compute Timestamp from Win32 `GetSystemTimeAsFileTime()`. + # + # FILETIME ft; + # GetSystemTimeAsFileTime(&ft); + # UINT64 ticks = (((UINT64)ft.dwHighDateTime) << 32) | ft.dwLowDateTime; + # + # // A Windows tick is 100 nanoseconds. Windows epoch 1601-01-01T00:00:00Z + # // is 11644473600 seconds before Unix epoch 1970-01-01T00:00:00Z. + # Timestamp timestamp; + # timestamp.set_seconds((INT64) ((ticks / 10000000) - 11644473600LL)); + # timestamp.set_nanos((INT32) ((ticks % 10000000) * 100)); + # + # Example 4: Compute Timestamp from Java `System.currentTimeMillis()`. + # + # long millis = System.currentTimeMillis(); + # + # Timestamp timestamp = Timestamp.newBuilder().setSeconds(millis / 1000) + # .setNanos((int) ((millis % 1000) * 1000000)).build(); + # + # Example 5: Compute Timestamp from Java `Instant.now()`. + # + # Instant now = Instant.now(); + # + # Timestamp timestamp = + # Timestamp.newBuilder().setSeconds(now.getEpochSecond()) + # .setNanos(now.getNano()).build(); + # + # Example 6: Compute Timestamp from current time in Python. + # + # timestamp = Timestamp() + # timestamp.GetCurrentTime() + # + # # JSON Mapping + # + # In JSON format, the Timestamp type is encoded as a string in the + # [RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format. That is, the + # format is "\\{year}-\\{month}-\\{day}T\\{hour}:\\{min}:\\{sec}[.\\{frac_sec}]Z" + # where \\{year} is always expressed using four digits while \\{month}, \\{day}, + # \\{hour}, \\{min}, and \\{sec} are zero-padded to two digits each. The fractional + # seconds, which can go up to 9 digits (i.e. up to 1 nanosecond resolution), + # are optional. The "Z" suffix indicates the timezone ("UTC"); the timezone + # is required. A proto3 JSON serializer should always use UTC (as indicated by + # "Z") when printing the Timestamp type and a proto3 JSON parser should be + # able to accept both UTC and other timezones (as indicated by an offset). + # + # For example, "2017-01-15T01:30:15.01Z" encodes 15.01 seconds past + # 01:30 UTC on January 15, 2017. + # + # In JavaScript, one can convert a Date object to this format using the + # standard + # [toISOString()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString) + # method. In Python, a standard `datetime.datetime` object can be converted + # to this format using + # [`strftime`](https://docs.python.org/2/library/time.html#time.strftime) with + # the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. 
Likewise, in Java, one can use + # the Joda Time's [`ISODateTimeFormat.dateTime()`]( + # http://joda-time.sourceforge.net/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime() + # ) to obtain a formatter capable of generating timestamps in this format. + # @!attribute [rw] seconds + # @return [::Integer] + # Represents seconds of UTC time since Unix epoch + # 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to + # 9999-12-31T23:59:59Z inclusive. + # @!attribute [rw] nanos + # @return [::Integer] + # Non-negative fractions of a second at nanosecond resolution. Negative + # second values with fractions must still have non-negative nanos values + # that count forward in time. Must be from 0 to 999,999,999 + # inclusive. + class Timestamp + include ::Google::Protobuf::MessageExts + extend ::Google::Protobuf::MessageExts::ClassMethods + end + end +end diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/protobuf/wrappers.rb b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/protobuf/wrappers.rb new file mode 100644 index 000000000000..5160138862c2 --- /dev/null +++ b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/protobuf/wrappers.rb @@ -0,0 +1,121 @@ +# frozen_string_literal: true + +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Auto-generated by gapic-generator-ruby. DO NOT EDIT! + + +module Google + module Protobuf + # Wrapper message for `double`. + # + # The JSON representation for `DoubleValue` is JSON number. + # @!attribute [rw] value + # @return [::Float] + # The double value. + class DoubleValue + include ::Google::Protobuf::MessageExts + extend ::Google::Protobuf::MessageExts::ClassMethods + end + + # Wrapper message for `float`. + # + # The JSON representation for `FloatValue` is JSON number. + # @!attribute [rw] value + # @return [::Float] + # The float value. + class FloatValue + include ::Google::Protobuf::MessageExts + extend ::Google::Protobuf::MessageExts::ClassMethods + end + + # Wrapper message for `int64`. + # + # The JSON representation for `Int64Value` is JSON string. + # @!attribute [rw] value + # @return [::Integer] + # The int64 value. + class Int64Value + include ::Google::Protobuf::MessageExts + extend ::Google::Protobuf::MessageExts::ClassMethods + end + + # Wrapper message for `uint64`. + # + # The JSON representation for `UInt64Value` is JSON string. + # @!attribute [rw] value + # @return [::Integer] + # The uint64 value. + class UInt64Value + include ::Google::Protobuf::MessageExts + extend ::Google::Protobuf::MessageExts::ClassMethods + end + + # Wrapper message for `int32`. + # + # The JSON representation for `Int32Value` is JSON number. + # @!attribute [rw] value + # @return [::Integer] + # The int32 value. + class Int32Value + include ::Google::Protobuf::MessageExts + extend ::Google::Protobuf::MessageExts::ClassMethods + end + + # Wrapper message for `uint32`. + # + # The JSON representation for `UInt32Value` is JSON number. 
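+    #
+    # Wrapper types exist so that "unset" can be distinguished from the type's
+    # default value. As a hedged sketch, the transfer API's
+    # `EncryptionConfiguration#kms_key_name` is a `StringValue` and might be
+    # populated as follows (the key name is a hypothetical placeholder):
+    #
+    #     Google::Cloud::Bigquery::DataTransfer::V1::EncryptionConfiguration.new(
+    #       kms_key_name: Google::Protobuf::StringValue.new(value: "projects/p/locations/l/keyRings/r/cryptoKeys/k")
+    #     )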
+ # @!attribute [rw] value + # @return [::Integer] + # The uint32 value. + class UInt32Value + include ::Google::Protobuf::MessageExts + extend ::Google::Protobuf::MessageExts::ClassMethods + end + + # Wrapper message for `bool`. + # + # The JSON representation for `BoolValue` is JSON `true` and `false`. + # @!attribute [rw] value + # @return [::Boolean] + # The bool value. + class BoolValue + include ::Google::Protobuf::MessageExts + extend ::Google::Protobuf::MessageExts::ClassMethods + end + + # Wrapper message for `string`. + # + # The JSON representation for `StringValue` is JSON string. + # @!attribute [rw] value + # @return [::String] + # The string value. + class StringValue + include ::Google::Protobuf::MessageExts + extend ::Google::Protobuf::MessageExts::ClassMethods + end + + # Wrapper message for `bytes`. + # + # The JSON representation for `BytesValue` is JSON string. + # @!attribute [rw] value + # @return [::String] + # The bytes value. + class BytesValue + include ::Google::Protobuf::MessageExts + extend ::Google::Protobuf::MessageExts::ClassMethods + end + end +end diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/rpc/status.rb b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/rpc/status.rb new file mode 100644 index 000000000000..09acc69b6125 --- /dev/null +++ b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/rpc/status.rb @@ -0,0 +1,48 @@ +# frozen_string_literal: true + +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Auto-generated by gapic-generator-ruby. DO NOT EDIT! + + +module Google + module Rpc + # The `Status` type defines a logical error model that is suitable for + # different programming environments, including REST APIs and RPC APIs. It is + # used by [gRPC](https://github.com/grpc). Each `Status` message contains + # three pieces of data: error code, error message, and error details. + # + # You can find out more about this error model and how to work with it in the + # [API Design Guide](https://cloud.google.com/apis/design/errors). + # @!attribute [rw] code + # @return [::Integer] + # The status code, which should be an enum value of + # [google.rpc.Code][google.rpc.Code]. + # @!attribute [rw] message + # @return [::String] + # A developer-facing error message, which should be in English. Any + # user-facing error message should be localized and sent in the + # {::Google::Rpc::Status#details google.rpc.Status.details} field, or localized + # by the client. + # @!attribute [rw] details + # @return [::Array<::Google::Protobuf::Any>] + # A list of messages that carry the error details. There is a common set of + # message types for APIs to use. 
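+    #
+    # A hedged sketch of reading a `Status` surfaced by the transfer API, for
+    # example on a failed run (`run` here is a hypothetical TransferRun fetched
+    # elsewhere):
+    #
+    #     status = run.error_status
+    #     unless status.nil? || status.code.zero?
+    #       warn "Run failed (code #{status.code}): #{status.message}"
+    #     end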
+ class Status + include ::Google::Protobuf::MessageExts + extend ::Google::Protobuf::MessageExts::ClassMethods + end + end +end diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/Gemfile b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/Gemfile new file mode 100644 index 000000000000..27eda5dbdbd1 --- /dev/null +++ b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/Gemfile @@ -0,0 +1,32 @@ +# frozen_string_literal: true + +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Auto-generated by gapic-generator-ruby. DO NOT EDIT! + +source "https://rubygems.org" + +if ENV["GOOGLE_CLOUD_SAMPLES_TEST"] == "master" + gem "google-cloud-bigquery-data_transfer-v1", path: "../" +else + gem "google-cloud-bigquery-data_transfer-v1" +end + +group :test do + gem "google-style", "~> 1.26.1" + gem "minitest", "~> 5.16" + gem "minitest-focus", "~> 1.1" + gem "minitest-hooks", "~> 1.5" +end diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/check_valid_creds.rb b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/check_valid_creds.rb new file mode 100644 index 000000000000..2f41db133d0e --- /dev/null +++ b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/check_valid_creds.rb @@ -0,0 +1,47 @@ +# frozen_string_literal: true + +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Auto-generated by gapic-generator-ruby. DO NOT EDIT! + +# [START bigquerydatatransfer_v1_generated_DataTransferService_CheckValidCreds_sync] +require "google/cloud/bigquery/data_transfer/v1" + +## +# Snippet for the check_valid_creds call in the DataTransferService service +# +# This snippet has been automatically generated and should be regarded as a code +# template only. It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in https://cloud.google.com/ruby/docs/reference. +# +# This is an auto-generated example demonstrating basic usage of +# Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client#check_valid_creds. +# +def check_valid_creds + # Create a client object. The client can be reused for multiple calls. + client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new + + # Create a request. 
To set request fields, pass in keyword arguments. + request = Google::Cloud::Bigquery::DataTransfer::V1::CheckValidCredsRequest.new + + # Call the check_valid_creds method. + result = client.check_valid_creds request + + # The returned object is of type Google::Cloud::Bigquery::DataTransfer::V1::CheckValidCredsResponse. + p result +end +# [END bigquerydatatransfer_v1_generated_DataTransferService_CheckValidCreds_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/create_transfer_config.rb b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/create_transfer_config.rb new file mode 100644 index 000000000000..297f5d8bf175 --- /dev/null +++ b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/create_transfer_config.rb @@ -0,0 +1,47 @@ +# frozen_string_literal: true + +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Auto-generated by gapic-generator-ruby. DO NOT EDIT! + +# [START bigquerydatatransfer_v1_generated_DataTransferService_CreateTransferConfig_sync] +require "google/cloud/bigquery/data_transfer/v1" + +## +# Snippet for the create_transfer_config call in the DataTransferService service +# +# This snippet has been automatically generated and should be regarded as a code +# template only. It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in https://cloud.google.com/ruby/docs/reference. +# +# This is an auto-generated example demonstrating basic usage of +# Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client#create_transfer_config. +# +def create_transfer_config + # Create a client object. The client can be reused for multiple calls. + client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new + + # Create a request. To set request fields, pass in keyword arguments. + request = Google::Cloud::Bigquery::DataTransfer::V1::CreateTransferConfigRequest.new + + # Call the create_transfer_config method. + result = client.create_transfer_config request + + # The returned object is of type Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig. 
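+  #
+  # A hedged sketch of a populated request; all values are hypothetical and a
+  # real transfer needs data-source-specific `params`:
+  #
+  #   request = Google::Cloud::Bigquery::DataTransfer::V1::CreateTransferConfigRequest.new(
+  #     parent: "projects/my-project/locations/us",
+  #     transfer_config: Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig.new(
+  #       display_name: "nightly load",
+  #       data_source_id: "google_cloud_storage",
+  #       destination_dataset_id: "my_dataset",
+  #       schedule: "every 24 hours"
+  #     )
+  #   )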
+ p result +end +# [END bigquerydatatransfer_v1_generated_DataTransferService_CreateTransferConfig_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/delete_transfer_config.rb b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/delete_transfer_config.rb new file mode 100644 index 000000000000..1d5c08777839 --- /dev/null +++ b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/delete_transfer_config.rb @@ -0,0 +1,47 @@ +# frozen_string_literal: true + +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Auto-generated by gapic-generator-ruby. DO NOT EDIT! + +# [START bigquerydatatransfer_v1_generated_DataTransferService_DeleteTransferConfig_sync] +require "google/cloud/bigquery/data_transfer/v1" + +## +# Snippet for the delete_transfer_config call in the DataTransferService service +# +# This snippet has been automatically generated and should be regarded as a code +# template only. It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in https://cloud.google.com/ruby/docs/reference. +# +# This is an auto-generated example demonstrating basic usage of +# Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client#delete_transfer_config. +# +def delete_transfer_config + # Create a client object. The client can be reused for multiple calls. + client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new + + # Create a request. To set request fields, pass in keyword arguments. + request = Google::Cloud::Bigquery::DataTransfer::V1::DeleteTransferConfigRequest.new + + # Call the delete_transfer_config method. + result = client.delete_transfer_config request + + # The returned object is of type Google::Protobuf::Empty. + p result +end +# [END bigquerydatatransfer_v1_generated_DataTransferService_DeleteTransferConfig_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/delete_transfer_run.rb b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/delete_transfer_run.rb new file mode 100644 index 000000000000..79209dcf9fe9 --- /dev/null +++ b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/delete_transfer_run.rb @@ -0,0 +1,47 @@ +# frozen_string_literal: true + +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +# Auto-generated by gapic-generator-ruby. DO NOT EDIT! + +# [START bigquerydatatransfer_v1_generated_DataTransferService_DeleteTransferRun_sync] +require "google/cloud/bigquery/data_transfer/v1" + +## +# Snippet for the delete_transfer_run call in the DataTransferService service +# +# This snippet has been automatically generated and should be regarded as a code +# template only. It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in https://cloud.google.com/ruby/docs/reference. +# +# This is an auto-generated example demonstrating basic usage of +# Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client#delete_transfer_run. +# +def delete_transfer_run + # Create a client object. The client can be reused for multiple calls. + client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new + + # Create a request. To set request fields, pass in keyword arguments. + request = Google::Cloud::Bigquery::DataTransfer::V1::DeleteTransferRunRequest.new + + # Call the delete_transfer_run method. + result = client.delete_transfer_run request + + # The returned object is of type Google::Protobuf::Empty. + p result +end +# [END bigquerydatatransfer_v1_generated_DataTransferService_DeleteTransferRun_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/enroll_data_sources.rb b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/enroll_data_sources.rb new file mode 100644 index 000000000000..5159c1b9658a --- /dev/null +++ b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/enroll_data_sources.rb @@ -0,0 +1,47 @@ +# frozen_string_literal: true + +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Auto-generated by gapic-generator-ruby. DO NOT EDIT! + +# [START bigquerydatatransfer_v1_generated_DataTransferService_EnrollDataSources_sync] +require "google/cloud/bigquery/data_transfer/v1" + +## +# Snippet for the enroll_data_sources call in the DataTransferService service +# +# This snippet has been automatically generated and should be regarded as a code +# template only. It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in https://cloud.google.com/ruby/docs/reference. +# +# This is an auto-generated example demonstrating basic usage of +# Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client#enroll_data_sources. +# +def enroll_data_sources + # Create a client object. The client can be reused for multiple calls. 
+ client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new + + # Create a request. To set request fields, pass in keyword arguments. + request = Google::Cloud::Bigquery::DataTransfer::V1::EnrollDataSourcesRequest.new + + # Call the enroll_data_sources method. + result = client.enroll_data_sources request + + # The returned object is of type Google::Protobuf::Empty. + p result +end +# [END bigquerydatatransfer_v1_generated_DataTransferService_EnrollDataSources_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/get_data_source.rb b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/get_data_source.rb new file mode 100644 index 000000000000..2eb9bdd308f1 --- /dev/null +++ b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/get_data_source.rb @@ -0,0 +1,47 @@ +# frozen_string_literal: true + +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Auto-generated by gapic-generator-ruby. DO NOT EDIT! + +# [START bigquerydatatransfer_v1_generated_DataTransferService_GetDataSource_sync] +require "google/cloud/bigquery/data_transfer/v1" + +## +# Snippet for the get_data_source call in the DataTransferService service +# +# This snippet has been automatically generated and should be regarded as a code +# template only. It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in https://cloud.google.com/ruby/docs/reference. +# +# This is an auto-generated example demonstrating basic usage of +# Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client#get_data_source. +# +def get_data_source + # Create a client object. The client can be reused for multiple calls. + client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new + + # Create a request. To set request fields, pass in keyword arguments. + request = Google::Cloud::Bigquery::DataTransfer::V1::GetDataSourceRequest.new + + # Call the get_data_source method. + result = client.get_data_source request + + # The returned object is of type Google::Cloud::Bigquery::DataTransfer::V1::DataSource. 
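+  # A DataSource describes one connector: its data_source_id, the OAuth
+  # scopes it needs, and the parameters a TransferConfig for it must
+  # supply. For example (hypothetical usage, field names per the v1 API):
+  #   result.parameters.each { |param| p param.param_id }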
+ p result +end +# [END bigquerydatatransfer_v1_generated_DataTransferService_GetDataSource_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/get_transfer_config.rb b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/get_transfer_config.rb new file mode 100644 index 000000000000..c8b1dfaaf116 --- /dev/null +++ b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/get_transfer_config.rb @@ -0,0 +1,47 @@ +# frozen_string_literal: true + +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Auto-generated by gapic-generator-ruby. DO NOT EDIT! + +# [START bigquerydatatransfer_v1_generated_DataTransferService_GetTransferConfig_sync] +require "google/cloud/bigquery/data_transfer/v1" + +## +# Snippet for the get_transfer_config call in the DataTransferService service +# +# This snippet has been automatically generated and should be regarded as a code +# template only. It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in https://cloud.google.com/ruby/docs/reference. +# +# This is an auto-generated example demonstrating basic usage of +# Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client#get_transfer_config. +# +def get_transfer_config + # Create a client object. The client can be reused for multiple calls. + client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new + + # Create a request. To set request fields, pass in keyword arguments. + request = Google::Cloud::Bigquery::DataTransfer::V1::GetTransferConfigRequest.new + + # Call the get_transfer_config method. + result = client.get_transfer_config request + + # The returned object is of type Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig. + p result +end +# [END bigquerydatatransfer_v1_generated_DataTransferService_GetTransferConfig_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/get_transfer_run.rb b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/get_transfer_run.rb new file mode 100644 index 000000000000..8e738239a0a7 --- /dev/null +++ b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/get_transfer_run.rb @@ -0,0 +1,47 @@ +# frozen_string_literal: true + +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +# Auto-generated by gapic-generator-ruby. DO NOT EDIT! + +# [START bigquerydatatransfer_v1_generated_DataTransferService_GetTransferRun_sync] +require "google/cloud/bigquery/data_transfer/v1" + +## +# Snippet for the get_transfer_run call in the DataTransferService service +# +# This snippet has been automatically generated and should be regarded as a code +# template only. It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in https://cloud.google.com/ruby/docs/reference. +# +# This is an auto-generated example demonstrating basic usage of +# Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client#get_transfer_run. +# +def get_transfer_run + # Create a client object. The client can be reused for multiple calls. + client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new + + # Create a request. To set request fields, pass in keyword arguments. + request = Google::Cloud::Bigquery::DataTransfer::V1::GetTransferRunRequest.new + + # Call the get_transfer_run method. + result = client.get_transfer_run request + + # The returned object is of type Google::Cloud::Bigquery::DataTransfer::V1::TransferRun. + p result +end +# [END bigquerydatatransfer_v1_generated_DataTransferService_GetTransferRun_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/list_data_sources.rb b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/list_data_sources.rb new file mode 100644 index 000000000000..c970382cedfa --- /dev/null +++ b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/list_data_sources.rb @@ -0,0 +1,51 @@ +# frozen_string_literal: true + +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Auto-generated by gapic-generator-ruby. DO NOT EDIT! + +# [START bigquerydatatransfer_v1_generated_DataTransferService_ListDataSources_sync] +require "google/cloud/bigquery/data_transfer/v1" + +## +# Snippet for the list_data_sources call in the DataTransferService service +# +# This snippet has been automatically generated and should be regarded as a code +# template only. It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in https://cloud.google.com/ruby/docs/reference. +# +# This is an auto-generated example demonstrating basic usage of +# Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client#list_data_sources. +# +def list_data_sources + # Create a client object. The client can be reused for multiple calls. 
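+  # (The empty request created below is a placeholder; a real call needs
+  # request.parent set, e.g. "projects/my-project" with a hypothetical
+  # project ID, while page_size and page_token are optional.)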
+ client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new + + # Create a request. To set request fields, pass in keyword arguments. + request = Google::Cloud::Bigquery::DataTransfer::V1::ListDataSourcesRequest.new + + # Call the list_data_sources method. + result = client.list_data_sources request + + # The returned object is of type Gapic::PagedEnumerable. You can iterate + # over elements, and API calls will be issued to fetch pages as needed. + result.each do |item| + # Each element is of type ::Google::Cloud::Bigquery::DataTransfer::V1::DataSource. + p item + end +end +# [END bigquerydatatransfer_v1_generated_DataTransferService_ListDataSources_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/list_transfer_configs.rb b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/list_transfer_configs.rb new file mode 100644 index 000000000000..6793c3706c00 --- /dev/null +++ b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/list_transfer_configs.rb @@ -0,0 +1,51 @@ +# frozen_string_literal: true + +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Auto-generated by gapic-generator-ruby. DO NOT EDIT! + +# [START bigquerydatatransfer_v1_generated_DataTransferService_ListTransferConfigs_sync] +require "google/cloud/bigquery/data_transfer/v1" + +## +# Snippet for the list_transfer_configs call in the DataTransferService service +# +# This snippet has been automatically generated and should be regarded as a code +# template only. It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in https://cloud.google.com/ruby/docs/reference. +# +# This is an auto-generated example demonstrating basic usage of +# Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client#list_transfer_configs. +# +def list_transfer_configs + # Create a client object. The client can be reused for multiple calls. + client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new + + # Create a request. To set request fields, pass in keyword arguments. + request = Google::Cloud::Bigquery::DataTransfer::V1::ListTransferConfigsRequest.new + + # Call the list_transfer_configs method. + result = client.list_transfer_configs request + + # The returned object is of type Gapic::PagedEnumerable. You can iterate + # over elements, and API calls will be issued to fetch pages as needed. + result.each do |item| + # Each element is of type ::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig. 
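+    # Fields worth inspecting on each config include display_name,
+    # schedule, and state (illustrative; see the TransferConfig reference
+    # for the full list).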
+ p item + end +end +# [END bigquerydatatransfer_v1_generated_DataTransferService_ListTransferConfigs_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/list_transfer_logs.rb b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/list_transfer_logs.rb new file mode 100644 index 000000000000..456c8d63b7ad --- /dev/null +++ b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/list_transfer_logs.rb @@ -0,0 +1,51 @@ +# frozen_string_literal: true + +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Auto-generated by gapic-generator-ruby. DO NOT EDIT! + +# [START bigquerydatatransfer_v1_generated_DataTransferService_ListTransferLogs_sync] +require "google/cloud/bigquery/data_transfer/v1" + +## +# Snippet for the list_transfer_logs call in the DataTransferService service +# +# This snippet has been automatically generated and should be regarded as a code +# template only. It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in https://cloud.google.com/ruby/docs/reference. +# +# This is an auto-generated example demonstrating basic usage of +# Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client#list_transfer_logs. +# +def list_transfer_logs + # Create a client object. The client can be reused for multiple calls. + client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new + + # Create a request. To set request fields, pass in keyword arguments. + request = Google::Cloud::Bigquery::DataTransfer::V1::ListTransferLogsRequest.new + + # Call the list_transfer_logs method. + result = client.list_transfer_logs request + + # The returned object is of type Gapic::PagedEnumerable. You can iterate + # over elements, and API calls will be issued to fetch pages as needed. + result.each do |item| + # Each element is of type ::Google::Cloud::Bigquery::DataTransfer::V1::TransferMessage. + p item + end +end +# [END bigquerydatatransfer_v1_generated_DataTransferService_ListTransferLogs_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/list_transfer_runs.rb b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/list_transfer_runs.rb new file mode 100644 index 000000000000..c5f588f2844d --- /dev/null +++ b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/list_transfer_runs.rb @@ -0,0 +1,51 @@ +# frozen_string_literal: true + +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Auto-generated by gapic-generator-ruby. DO NOT EDIT! + +# [START bigquerydatatransfer_v1_generated_DataTransferService_ListTransferRuns_sync] +require "google/cloud/bigquery/data_transfer/v1" + +## +# Snippet for the list_transfer_runs call in the DataTransferService service +# +# This snippet has been automatically generated and should be regarded as a code +# template only. It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in https://cloud.google.com/ruby/docs/reference. +# +# This is an auto-generated example demonstrating basic usage of +# Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client#list_transfer_runs. +# +def list_transfer_runs + # Create a client object. The client can be reused for multiple calls. + client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new + + # Create a request. To set request fields, pass in keyword arguments. + request = Google::Cloud::Bigquery::DataTransfer::V1::ListTransferRunsRequest.new + + # Call the list_transfer_runs method. + result = client.list_transfer_runs request + + # The returned object is of type Gapic::PagedEnumerable. You can iterate + # over elements, and API calls will be issued to fetch pages as needed. + result.each do |item| + # Each element is of type ::Google::Cloud::Bigquery::DataTransfer::V1::TransferRun. + p item + end +end +# [END bigquerydatatransfer_v1_generated_DataTransferService_ListTransferRuns_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/schedule_transfer_runs.rb b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/schedule_transfer_runs.rb new file mode 100644 index 000000000000..a347a1fc13e9 --- /dev/null +++ b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/schedule_transfer_runs.rb @@ -0,0 +1,47 @@ +# frozen_string_literal: true + +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Auto-generated by gapic-generator-ruby. DO NOT EDIT! + +# [START bigquerydatatransfer_v1_generated_DataTransferService_ScheduleTransferRuns_sync] +require "google/cloud/bigquery/data_transfer/v1" + +## +# Snippet for the schedule_transfer_runs call in the DataTransferService service +# +# This snippet has been automatically generated and should be regarded as a code +# template only. 
It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in https://cloud.google.com/ruby/docs/reference. +# +# This is an auto-generated example demonstrating basic usage of +# Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client#schedule_transfer_runs. +# +def schedule_transfer_runs + # Create a client object. The client can be reused for multiple calls. + client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new + + # Create a request. To set request fields, pass in keyword arguments. + request = Google::Cloud::Bigquery::DataTransfer::V1::ScheduleTransferRunsRequest.new + + # Call the schedule_transfer_runs method. + result = client.schedule_transfer_runs request + + # The returned object is of type Google::Cloud::Bigquery::DataTransfer::V1::ScheduleTransferRunsResponse. + p result +end +# [END bigquerydatatransfer_v1_generated_DataTransferService_ScheduleTransferRuns_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/start_manual_transfer_runs.rb b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/start_manual_transfer_runs.rb new file mode 100644 index 000000000000..0eda612768c0 --- /dev/null +++ b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/start_manual_transfer_runs.rb @@ -0,0 +1,47 @@ +# frozen_string_literal: true + +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Auto-generated by gapic-generator-ruby. DO NOT EDIT! + +# [START bigquerydatatransfer_v1_generated_DataTransferService_StartManualTransferRuns_sync] +require "google/cloud/bigquery/data_transfer/v1" + +## +# Snippet for the start_manual_transfer_runs call in the DataTransferService service +# +# This snippet has been automatically generated and should be regarded as a code +# template only. It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in https://cloud.google.com/ruby/docs/reference. +# +# This is an auto-generated example demonstrating basic usage of +# Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client#start_manual_transfer_runs. +# +def start_manual_transfer_runs + # Create a client object. The client can be reused for multiple calls. + client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new + + # Create a request. To set request fields, pass in keyword arguments. + request = Google::Cloud::Bigquery::DataTransfer::V1::StartManualTransferRunsRequest.new + + # Call the start_manual_transfer_runs method. + result = client.start_manual_transfer_runs request + + # The returned object is of type Google::Cloud::Bigquery::DataTransfer::V1::StartManualTransferRunsResponse. 
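+  # The response's runs field carries the TransferRun objects this call
+  # created. A real request must set parent plus either
+  # requested_time_range or requested_run_time (field names per the v1 API).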
+ p result +end +# [END bigquerydatatransfer_v1_generated_DataTransferService_StartManualTransferRuns_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/unenroll_data_sources.rb b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/unenroll_data_sources.rb new file mode 100644 index 000000000000..357c7fb83f83 --- /dev/null +++ b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/unenroll_data_sources.rb @@ -0,0 +1,47 @@ +# frozen_string_literal: true + +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Auto-generated by gapic-generator-ruby. DO NOT EDIT! + +# [START bigquerydatatransfer_v1_generated_DataTransferService_UnenrollDataSources_sync] +require "google/cloud/bigquery/data_transfer/v1" + +## +# Snippet for the unenroll_data_sources call in the DataTransferService service +# +# This snippet has been automatically generated and should be regarded as a code +# template only. It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in https://cloud.google.com/ruby/docs/reference. +# +# This is an auto-generated example demonstrating basic usage of +# Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client#unenroll_data_sources. +# +def unenroll_data_sources + # Create a client object. The client can be reused for multiple calls. + client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new + + # Create a request. To set request fields, pass in keyword arguments. + request = Google::Cloud::Bigquery::DataTransfer::V1::UnenrollDataSourcesRequest.new + + # Call the unenroll_data_sources method. + result = client.unenroll_data_sources request + + # The returned object is of type Google::Protobuf::Empty. + p result +end +# [END bigquerydatatransfer_v1_generated_DataTransferService_UnenrollDataSources_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/update_transfer_config.rb b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/update_transfer_config.rb new file mode 100644 index 000000000000..74f806b153a9 --- /dev/null +++ b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/update_transfer_config.rb @@ -0,0 +1,47 @@ +# frozen_string_literal: true + +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +# Auto-generated by gapic-generator-ruby. DO NOT EDIT! + +# [START bigquerydatatransfer_v1_generated_DataTransferService_UpdateTransferConfig_sync] +require "google/cloud/bigquery/data_transfer/v1" + +## +# Snippet for the update_transfer_config call in the DataTransferService service +# +# This snippet has been automatically generated and should be regarded as a code +# template only. It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in https://cloud.google.com/ruby/docs/reference. +# +# This is an auto-generated example demonstrating basic usage of +# Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client#update_transfer_config. +# +def update_transfer_config + # Create a client object. The client can be reused for multiple calls. + client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new + + # Create a request. To set request fields, pass in keyword arguments. + request = Google::Cloud::Bigquery::DataTransfer::V1::UpdateTransferConfigRequest.new + + # Call the update_transfer_config method. + result = client.update_transfer_config request + + # The returned object is of type Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig. + p result +end +# [END bigquerydatatransfer_v1_generated_DataTransferService_UpdateTransferConfig_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/snippet_metadata_google.cloud.bigquery.datatransfer.v1.json b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/snippet_metadata_google.cloud.bigquery.datatransfer.v1.json new file mode 100644 index 000000000000..c079afe66ffd --- /dev/null +++ b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/snippet_metadata_google.cloud.bigquery.datatransfer.v1.json @@ -0,0 +1,655 @@ +{ + "client_library": { + "name": "google-cloud-bigquery-data_transfer-v1", + "version": "", + "language": "RUBY", + "apis": [ + { + "id": "google.cloud.bigquery.datatransfer.v1", + "version": "v1" + } + ] + }, + "snippets": [ + { + "region_tag": "bigquerydatatransfer_v1_generated_DataTransferService_GetDataSource_sync", + "title": "Snippet for the get_data_source call in the DataTransferService service", + "description": "This is an auto-generated example demonstrating basic usage of Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client#get_data_source.", + "file": "data_transfer_service/get_data_source.rb", + "language": "RUBY", + "client_method": { + "short_name": "get_data_source", + "full_name": "::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client#get_data_source", + "async": false, + "parameters": [ + { + "type": "::Google::Cloud::Bigquery::DataTransfer::V1::GetDataSourceRequest", + "name": "request" + } + ], + "result_type": "::Google::Cloud::Bigquery::DataTransfer::V1::DataSource", + "client": { + "short_name": "DataTransferService::Client", + "full_name": "::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client" + }, + "method": { + "short_name": "GetDataSource", + "full_name": "google.cloud.bigquery.datatransfer.v1.DataTransferService.GetDataSource", + "service": { + "short_name": "DataTransferService", + "full_name": "google.cloud.bigquery.datatransfer.v1.DataTransferService" + } + } + }, + "canonical": true, + 
"origin": "API_DEFINITION", + "segments": [ + { + "start": 20, + "end": 46, + "type": "FULL" + } + ] + }, + { + "region_tag": "bigquerydatatransfer_v1_generated_DataTransferService_ListDataSources_sync", + "title": "Snippet for the list_data_sources call in the DataTransferService service", + "description": "This is an auto-generated example demonstrating basic usage of Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client#list_data_sources.", + "file": "data_transfer_service/list_data_sources.rb", + "language": "RUBY", + "client_method": { + "short_name": "list_data_sources", + "full_name": "::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client#list_data_sources", + "async": false, + "parameters": [ + { + "type": "::Google::Cloud::Bigquery::DataTransfer::V1::ListDataSourcesRequest", + "name": "request" + } + ], + "result_type": "::Google::Cloud::Bigquery::DataTransfer::V1::ListDataSourcesResponse", + "client": { + "short_name": "DataTransferService::Client", + "full_name": "::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client" + }, + "method": { + "short_name": "ListDataSources", + "full_name": "google.cloud.bigquery.datatransfer.v1.DataTransferService.ListDataSources", + "service": { + "short_name": "DataTransferService", + "full_name": "google.cloud.bigquery.datatransfer.v1.DataTransferService" + } + } + }, + "canonical": true, + "origin": "API_DEFINITION", + "segments": [ + { + "start": 20, + "end": 50, + "type": "FULL" + } + ] + }, + { + "region_tag": "bigquerydatatransfer_v1_generated_DataTransferService_CreateTransferConfig_sync", + "title": "Snippet for the create_transfer_config call in the DataTransferService service", + "description": "This is an auto-generated example demonstrating basic usage of Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client#create_transfer_config.", + "file": "data_transfer_service/create_transfer_config.rb", + "language": "RUBY", + "client_method": { + "short_name": "create_transfer_config", + "full_name": "::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client#create_transfer_config", + "async": false, + "parameters": [ + { + "type": "::Google::Cloud::Bigquery::DataTransfer::V1::CreateTransferConfigRequest", + "name": "request" + } + ], + "result_type": "::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig", + "client": { + "short_name": "DataTransferService::Client", + "full_name": "::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client" + }, + "method": { + "short_name": "CreateTransferConfig", + "full_name": "google.cloud.bigquery.datatransfer.v1.DataTransferService.CreateTransferConfig", + "service": { + "short_name": "DataTransferService", + "full_name": "google.cloud.bigquery.datatransfer.v1.DataTransferService" + } + } + }, + "canonical": true, + "origin": "API_DEFINITION", + "segments": [ + { + "start": 20, + "end": 46, + "type": "FULL" + } + ] + }, + { + "region_tag": "bigquerydatatransfer_v1_generated_DataTransferService_UpdateTransferConfig_sync", + "title": "Snippet for the update_transfer_config call in the DataTransferService service", + "description": "This is an auto-generated example demonstrating basic usage of Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client#update_transfer_config.", + "file": "data_transfer_service/update_transfer_config.rb", + "language": "RUBY", + "client_method": { + "short_name": "update_transfer_config", + "full_name": 
"::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client#update_transfer_config", + "async": false, + "parameters": [ + { + "type": "::Google::Cloud::Bigquery::DataTransfer::V1::UpdateTransferConfigRequest", + "name": "request" + } + ], + "result_type": "::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig", + "client": { + "short_name": "DataTransferService::Client", + "full_name": "::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client" + }, + "method": { + "short_name": "UpdateTransferConfig", + "full_name": "google.cloud.bigquery.datatransfer.v1.DataTransferService.UpdateTransferConfig", + "service": { + "short_name": "DataTransferService", + "full_name": "google.cloud.bigquery.datatransfer.v1.DataTransferService" + } + } + }, + "canonical": true, + "origin": "API_DEFINITION", + "segments": [ + { + "start": 20, + "end": 46, + "type": "FULL" + } + ] + }, + { + "region_tag": "bigquerydatatransfer_v1_generated_DataTransferService_DeleteTransferConfig_sync", + "title": "Snippet for the delete_transfer_config call in the DataTransferService service", + "description": "This is an auto-generated example demonstrating basic usage of Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client#delete_transfer_config.", + "file": "data_transfer_service/delete_transfer_config.rb", + "language": "RUBY", + "client_method": { + "short_name": "delete_transfer_config", + "full_name": "::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client#delete_transfer_config", + "async": false, + "parameters": [ + { + "type": "::Google::Cloud::Bigquery::DataTransfer::V1::DeleteTransferConfigRequest", + "name": "request" + } + ], + "result_type": "::Google::Protobuf::Empty", + "client": { + "short_name": "DataTransferService::Client", + "full_name": "::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client" + }, + "method": { + "short_name": "DeleteTransferConfig", + "full_name": "google.cloud.bigquery.datatransfer.v1.DataTransferService.DeleteTransferConfig", + "service": { + "short_name": "DataTransferService", + "full_name": "google.cloud.bigquery.datatransfer.v1.DataTransferService" + } + } + }, + "canonical": true, + "origin": "API_DEFINITION", + "segments": [ + { + "start": 20, + "end": 46, + "type": "FULL" + } + ] + }, + { + "region_tag": "bigquerydatatransfer_v1_generated_DataTransferService_GetTransferConfig_sync", + "title": "Snippet for the get_transfer_config call in the DataTransferService service", + "description": "This is an auto-generated example demonstrating basic usage of Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client#get_transfer_config.", + "file": "data_transfer_service/get_transfer_config.rb", + "language": "RUBY", + "client_method": { + "short_name": "get_transfer_config", + "full_name": "::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client#get_transfer_config", + "async": false, + "parameters": [ + { + "type": "::Google::Cloud::Bigquery::DataTransfer::V1::GetTransferConfigRequest", + "name": "request" + } + ], + "result_type": "::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig", + "client": { + "short_name": "DataTransferService::Client", + "full_name": "::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client" + }, + "method": { + "short_name": "GetTransferConfig", + "full_name": "google.cloud.bigquery.datatransfer.v1.DataTransferService.GetTransferConfig", + "service": { + "short_name": "DataTransferService", + "full_name": 
"google.cloud.bigquery.datatransfer.v1.DataTransferService" + } + } + }, + "canonical": true, + "origin": "API_DEFINITION", + "segments": [ + { + "start": 20, + "end": 46, + "type": "FULL" + } + ] + }, + { + "region_tag": "bigquerydatatransfer_v1_generated_DataTransferService_ListTransferConfigs_sync", + "title": "Snippet for the list_transfer_configs call in the DataTransferService service", + "description": "This is an auto-generated example demonstrating basic usage of Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client#list_transfer_configs.", + "file": "data_transfer_service/list_transfer_configs.rb", + "language": "RUBY", + "client_method": { + "short_name": "list_transfer_configs", + "full_name": "::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client#list_transfer_configs", + "async": false, + "parameters": [ + { + "type": "::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferConfigsRequest", + "name": "request" + } + ], + "result_type": "::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferConfigsResponse", + "client": { + "short_name": "DataTransferService::Client", + "full_name": "::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client" + }, + "method": { + "short_name": "ListTransferConfigs", + "full_name": "google.cloud.bigquery.datatransfer.v1.DataTransferService.ListTransferConfigs", + "service": { + "short_name": "DataTransferService", + "full_name": "google.cloud.bigquery.datatransfer.v1.DataTransferService" + } + } + }, + "canonical": true, + "origin": "API_DEFINITION", + "segments": [ + { + "start": 20, + "end": 50, + "type": "FULL" + } + ] + }, + { + "region_tag": "bigquerydatatransfer_v1_generated_DataTransferService_ScheduleTransferRuns_sync", + "title": "Snippet for the schedule_transfer_runs call in the DataTransferService service", + "description": "This is an auto-generated example demonstrating basic usage of Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client#schedule_transfer_runs.", + "file": "data_transfer_service/schedule_transfer_runs.rb", + "language": "RUBY", + "client_method": { + "short_name": "schedule_transfer_runs", + "full_name": "::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client#schedule_transfer_runs", + "async": false, + "parameters": [ + { + "type": "::Google::Cloud::Bigquery::DataTransfer::V1::ScheduleTransferRunsRequest", + "name": "request" + } + ], + "result_type": "::Google::Cloud::Bigquery::DataTransfer::V1::ScheduleTransferRunsResponse", + "client": { + "short_name": "DataTransferService::Client", + "full_name": "::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client" + }, + "method": { + "short_name": "ScheduleTransferRuns", + "full_name": "google.cloud.bigquery.datatransfer.v1.DataTransferService.ScheduleTransferRuns", + "service": { + "short_name": "DataTransferService", + "full_name": "google.cloud.bigquery.datatransfer.v1.DataTransferService" + } + } + }, + "canonical": true, + "origin": "API_DEFINITION", + "segments": [ + { + "start": 20, + "end": 46, + "type": "FULL" + } + ] + }, + { + "region_tag": "bigquerydatatransfer_v1_generated_DataTransferService_StartManualTransferRuns_sync", + "title": "Snippet for the start_manual_transfer_runs call in the DataTransferService service", + "description": "This is an auto-generated example demonstrating basic usage of Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client#start_manual_transfer_runs.", + "file": 
"data_transfer_service/start_manual_transfer_runs.rb", + "language": "RUBY", + "client_method": { + "short_name": "start_manual_transfer_runs", + "full_name": "::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client#start_manual_transfer_runs", + "async": false, + "parameters": [ + { + "type": "::Google::Cloud::Bigquery::DataTransfer::V1::StartManualTransferRunsRequest", + "name": "request" + } + ], + "result_type": "::Google::Cloud::Bigquery::DataTransfer::V1::StartManualTransferRunsResponse", + "client": { + "short_name": "DataTransferService::Client", + "full_name": "::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client" + }, + "method": { + "short_name": "StartManualTransferRuns", + "full_name": "google.cloud.bigquery.datatransfer.v1.DataTransferService.StartManualTransferRuns", + "service": { + "short_name": "DataTransferService", + "full_name": "google.cloud.bigquery.datatransfer.v1.DataTransferService" + } + } + }, + "canonical": true, + "origin": "API_DEFINITION", + "segments": [ + { + "start": 20, + "end": 46, + "type": "FULL" + } + ] + }, + { + "region_tag": "bigquerydatatransfer_v1_generated_DataTransferService_GetTransferRun_sync", + "title": "Snippet for the get_transfer_run call in the DataTransferService service", + "description": "This is an auto-generated example demonstrating basic usage of Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client#get_transfer_run.", + "file": "data_transfer_service/get_transfer_run.rb", + "language": "RUBY", + "client_method": { + "short_name": "get_transfer_run", + "full_name": "::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client#get_transfer_run", + "async": false, + "parameters": [ + { + "type": "::Google::Cloud::Bigquery::DataTransfer::V1::GetTransferRunRequest", + "name": "request" + } + ], + "result_type": "::Google::Cloud::Bigquery::DataTransfer::V1::TransferRun", + "client": { + "short_name": "DataTransferService::Client", + "full_name": "::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client" + }, + "method": { + "short_name": "GetTransferRun", + "full_name": "google.cloud.bigquery.datatransfer.v1.DataTransferService.GetTransferRun", + "service": { + "short_name": "DataTransferService", + "full_name": "google.cloud.bigquery.datatransfer.v1.DataTransferService" + } + } + }, + "canonical": true, + "origin": "API_DEFINITION", + "segments": [ + { + "start": 20, + "end": 46, + "type": "FULL" + } + ] + }, + { + "region_tag": "bigquerydatatransfer_v1_generated_DataTransferService_DeleteTransferRun_sync", + "title": "Snippet for the delete_transfer_run call in the DataTransferService service", + "description": "This is an auto-generated example demonstrating basic usage of Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client#delete_transfer_run.", + "file": "data_transfer_service/delete_transfer_run.rb", + "language": "RUBY", + "client_method": { + "short_name": "delete_transfer_run", + "full_name": "::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client#delete_transfer_run", + "async": false, + "parameters": [ + { + "type": "::Google::Cloud::Bigquery::DataTransfer::V1::DeleteTransferRunRequest", + "name": "request" + } + ], + "result_type": "::Google::Protobuf::Empty", + "client": { + "short_name": "DataTransferService::Client", + "full_name": "::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client" + }, + "method": { + "short_name": "DeleteTransferRun", + "full_name": 
"google.cloud.bigquery.datatransfer.v1.DataTransferService.DeleteTransferRun", + "service": { + "short_name": "DataTransferService", + "full_name": "google.cloud.bigquery.datatransfer.v1.DataTransferService" + } + } + }, + "canonical": true, + "origin": "API_DEFINITION", + "segments": [ + { + "start": 20, + "end": 46, + "type": "FULL" + } + ] + }, + { + "region_tag": "bigquerydatatransfer_v1_generated_DataTransferService_ListTransferRuns_sync", + "title": "Snippet for the list_transfer_runs call in the DataTransferService service", + "description": "This is an auto-generated example demonstrating basic usage of Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client#list_transfer_runs.", + "file": "data_transfer_service/list_transfer_runs.rb", + "language": "RUBY", + "client_method": { + "short_name": "list_transfer_runs", + "full_name": "::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client#list_transfer_runs", + "async": false, + "parameters": [ + { + "type": "::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferRunsRequest", + "name": "request" + } + ], + "result_type": "::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferRunsResponse", + "client": { + "short_name": "DataTransferService::Client", + "full_name": "::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client" + }, + "method": { + "short_name": "ListTransferRuns", + "full_name": "google.cloud.bigquery.datatransfer.v1.DataTransferService.ListTransferRuns", + "service": { + "short_name": "DataTransferService", + "full_name": "google.cloud.bigquery.datatransfer.v1.DataTransferService" + } + } + }, + "canonical": true, + "origin": "API_DEFINITION", + "segments": [ + { + "start": 20, + "end": 50, + "type": "FULL" + } + ] + }, + { + "region_tag": "bigquerydatatransfer_v1_generated_DataTransferService_ListTransferLogs_sync", + "title": "Snippet for the list_transfer_logs call in the DataTransferService service", + "description": "This is an auto-generated example demonstrating basic usage of Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client#list_transfer_logs.", + "file": "data_transfer_service/list_transfer_logs.rb", + "language": "RUBY", + "client_method": { + "short_name": "list_transfer_logs", + "full_name": "::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client#list_transfer_logs", + "async": false, + "parameters": [ + { + "type": "::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferLogsRequest", + "name": "request" + } + ], + "result_type": "::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferLogsResponse", + "client": { + "short_name": "DataTransferService::Client", + "full_name": "::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client" + }, + "method": { + "short_name": "ListTransferLogs", + "full_name": "google.cloud.bigquery.datatransfer.v1.DataTransferService.ListTransferLogs", + "service": { + "short_name": "DataTransferService", + "full_name": "google.cloud.bigquery.datatransfer.v1.DataTransferService" + } + } + }, + "canonical": true, + "origin": "API_DEFINITION", + "segments": [ + { + "start": 20, + "end": 50, + "type": "FULL" + } + ] + }, + { + "region_tag": "bigquerydatatransfer_v1_generated_DataTransferService_CheckValidCreds_sync", + "title": "Snippet for the check_valid_creds call in the DataTransferService service", + "description": "This is an auto-generated example demonstrating basic usage of Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client#check_valid_creds.", + 
"file": "data_transfer_service/check_valid_creds.rb", + "language": "RUBY", + "client_method": { + "short_name": "check_valid_creds", + "full_name": "::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client#check_valid_creds", + "async": false, + "parameters": [ + { + "type": "::Google::Cloud::Bigquery::DataTransfer::V1::CheckValidCredsRequest", + "name": "request" + } + ], + "result_type": "::Google::Cloud::Bigquery::DataTransfer::V1::CheckValidCredsResponse", + "client": { + "short_name": "DataTransferService::Client", + "full_name": "::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client" + }, + "method": { + "short_name": "CheckValidCreds", + "full_name": "google.cloud.bigquery.datatransfer.v1.DataTransferService.CheckValidCreds", + "service": { + "short_name": "DataTransferService", + "full_name": "google.cloud.bigquery.datatransfer.v1.DataTransferService" + } + } + }, + "canonical": true, + "origin": "API_DEFINITION", + "segments": [ + { + "start": 20, + "end": 46, + "type": "FULL" + } + ] + }, + { + "region_tag": "bigquerydatatransfer_v1_generated_DataTransferService_EnrollDataSources_sync", + "title": "Snippet for the enroll_data_sources call in the DataTransferService service", + "description": "This is an auto-generated example demonstrating basic usage of Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client#enroll_data_sources.", + "file": "data_transfer_service/enroll_data_sources.rb", + "language": "RUBY", + "client_method": { + "short_name": "enroll_data_sources", + "full_name": "::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client#enroll_data_sources", + "async": false, + "parameters": [ + { + "type": "::Google::Cloud::Bigquery::DataTransfer::V1::EnrollDataSourcesRequest", + "name": "request" + } + ], + "result_type": "::Google::Protobuf::Empty", + "client": { + "short_name": "DataTransferService::Client", + "full_name": "::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client" + }, + "method": { + "short_name": "EnrollDataSources", + "full_name": "google.cloud.bigquery.datatransfer.v1.DataTransferService.EnrollDataSources", + "service": { + "short_name": "DataTransferService", + "full_name": "google.cloud.bigquery.datatransfer.v1.DataTransferService" + } + } + }, + "canonical": true, + "origin": "API_DEFINITION", + "segments": [ + { + "start": 20, + "end": 46, + "type": "FULL" + } + ] + }, + { + "region_tag": "bigquerydatatransfer_v1_generated_DataTransferService_UnenrollDataSources_sync", + "title": "Snippet for the unenroll_data_sources call in the DataTransferService service", + "description": "This is an auto-generated example demonstrating basic usage of Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client#unenroll_data_sources.", + "file": "data_transfer_service/unenroll_data_sources.rb", + "language": "RUBY", + "client_method": { + "short_name": "unenroll_data_sources", + "full_name": "::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client#unenroll_data_sources", + "async": false, + "parameters": [ + { + "type": "::Google::Cloud::Bigquery::DataTransfer::V1::UnenrollDataSourcesRequest", + "name": "request" + } + ], + "result_type": "::Google::Protobuf::Empty", + "client": { + "short_name": "DataTransferService::Client", + "full_name": "::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client" + }, + "method": { + "short_name": "UnenrollDataSources", + "full_name": 
"google.cloud.bigquery.datatransfer.v1.DataTransferService.UnenrollDataSources", + "service": { + "short_name": "DataTransferService", + "full_name": "google.cloud.bigquery.datatransfer.v1.DataTransferService" + } + } + }, + "canonical": true, + "origin": "API_DEFINITION", + "segments": [ + { + "start": 20, + "end": 46, + "type": "FULL" + } + ] + } + ] +} \ No newline at end of file diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/test/google/cloud/bigquery/data_transfer/v1/data_transfer_service_paths_test.rb b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/test/google/cloud/bigquery/data_transfer/v1/data_transfer_service_paths_test.rb new file mode 100644 index 000000000000..054310efd5f4 --- /dev/null +++ b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/test/google/cloud/bigquery/data_transfer/v1/data_transfer_service_paths_test.rb @@ -0,0 +1,104 @@ +# frozen_string_literal: true + +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Auto-generated by gapic-generator-ruby. DO NOT EDIT! + +require "helper" + +require "gapic/grpc/service_stub" + +require "google/cloud/bigquery/data_transfer/v1/data_transfer_service" + +class ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::ClientPathsTest < Minitest::Test + class DummyStub + def endpoint + "endpoint.example.com" + end + + def universe_domain + "example.com" + end + end + + def test_data_source_path + grpc_channel = ::GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure + ::Gapic::ServiceStub.stub :new, DummyStub.new do + client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new do |config| + config.credentials = grpc_channel + end + + path = client.data_source_path project: "value0", data_source: "value1" + assert_equal "projects/value0/dataSources/value1", path + + path = client.data_source_path project: "value0", location: "value1", data_source: "value2" + assert_equal "projects/value0/locations/value1/dataSources/value2", path + end + end + + def test_location_path + grpc_channel = ::GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure + ::Gapic::ServiceStub.stub :new, DummyStub.new do + client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new do |config| + config.credentials = grpc_channel + end + + path = client.location_path project: "value0", location: "value1" + assert_equal "projects/value0/locations/value1", path + end + end + + def test_project_path + grpc_channel = ::GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure + ::Gapic::ServiceStub.stub :new, DummyStub.new do + client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new do |config| + config.credentials = grpc_channel + end + + path = client.project_path project: "value0" + assert_equal "projects/value0", path + end + end + + def test_run_path + grpc_channel = ::GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure + 
::Gapic::ServiceStub.stub :new, DummyStub.new do + client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new do |config| + config.credentials = grpc_channel + end + + path = client.run_path project: "value0", transfer_config: "value1", run: "value2" + assert_equal "projects/value0/transferConfigs/value1/runs/value2", path + + path = client.run_path project: "value0", location: "value1", transfer_config: "value2", run: "value3" + assert_equal "projects/value0/locations/value1/transferConfigs/value2/runs/value3", path + end + end + + def test_transfer_config_path + grpc_channel = ::GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure + ::Gapic::ServiceStub.stub :new, DummyStub.new do + client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new do |config| + config.credentials = grpc_channel + end + + path = client.transfer_config_path project: "value0", transfer_config: "value1" + assert_equal "projects/value0/transferConfigs/value1", path + + path = client.transfer_config_path project: "value0", location: "value1", transfer_config: "value2" + assert_equal "projects/value0/locations/value1/transferConfigs/value2", path + end + end +end diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/test/google/cloud/bigquery/data_transfer/v1/data_transfer_service_rest_test.rb b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/test/google/cloud/bigquery/data_transfer/v1/data_transfer_service_rest_test.rb new file mode 100644 index 000000000000..2f9ff48637f1 --- /dev/null +++ b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/test/google/cloud/bigquery/data_transfer/v1/data_transfer_service_rest_test.rb @@ -0,0 +1,980 @@ +# frozen_string_literal: true + +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Auto-generated by gapic-generator-ruby. DO NOT EDIT! 
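+# These tests drive the REST client against an in-process HTTP stub:
+# Gapic::Rest::ClientStub is replaced so no network requests are made, and
+# each RPC is exercised with every supported calling convention (hash,
+# named arguments, protobuf request, each with and without call options).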
+ +require "helper" +require "gapic/rest" +require "google/cloud/bigquery/datatransfer/v1/datatransfer_pb" +require "google/cloud/bigquery/data_transfer/v1/data_transfer_service/rest" + + +class ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::ClientTest < Minitest::Test + class ClientStub + attr_accessor :call_count, :requests + + def initialize response, &block + @response = response + @block = block + @call_count = 0 + @requests = [] + end + + def make_get_request uri:, params: {}, options: {} + make_http_request :get, uri: uri, body: nil, params: params, options: options + end + + def make_delete_request uri:, params: {}, options: {} + make_http_request :delete, uri: uri, body: nil, params: params, options: options + end + + def make_post_request uri:, body: nil, params: {}, options: {} + make_http_request :post, uri: uri, body: body, params: params, options: options + end + + def make_patch_request uri:, body:, params: {}, options: {} + make_http_request :patch, uri: uri, body: body, params: params, options: options + end + + def make_put_request uri:, body:, params: {}, options: {} + make_http_request :put, uri: uri, body: body, params: params, options: options + end + + def make_http_request *args, **kwargs + @call_count += 1 + + @requests << @block&.call(*args, **kwargs) + + @response + end + + def endpoint + "endpoint.example.com" + end + + def universe_domain + "example.com" + end + end + + def test_get_data_source + # Create test objects. + client_result = ::Google::Cloud::Bigquery::DataTransfer::V1::DataSource.new + http_response = OpenStruct.new body: client_result.to_json + + call_options = {} + + # Create request parameters for a unary method. + name = "hello world" + + get_data_source_client_stub = ClientStub.new http_response do |_verb, uri:, body:, params:, options:| + assert options.metadata.key? :"x-goog-api-client" + assert options.metadata[:"x-goog-api-client"].include? "rest" + refute options.metadata[:"x-goog-api-client"].include? "grpc" + end + + ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::ServiceStub.stub :transcode_get_data_source_request, ["", "", {}] do + Gapic::Rest::ClientStub.stub :new, get_data_source_client_stub do + # Create client + client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::Client.new do |config| + config.credentials = :dummy_value + end + + # Use hash object + client.get_data_source({ name: name }) do |_result, response| + assert_equal http_response, response.underlying_op + end + + # Use named arguments + client.get_data_source name: name do |_result, response| + assert_equal http_response, response.underlying_op + end + + # Use protobuf object + client.get_data_source ::Google::Cloud::Bigquery::DataTransfer::V1::GetDataSourceRequest.new(name: name) do |_result, response| + assert_equal http_response, response.underlying_op + end + + # Use hash object with options + client.get_data_source({ name: name }, call_options) do |_result, response| + assert_equal http_response, response.underlying_op + end + + # Use protobuf object with options + client.get_data_source(::Google::Cloud::Bigquery::DataTransfer::V1::GetDataSourceRequest.new(name: name), call_options) do |_result, response| + assert_equal http_response, response.underlying_op + end + + # Verify method calls + assert_equal 5, get_data_source_client_stub.call_count + end + end + end + + def test_list_data_sources + # Create test objects. 
+ client_result = ::Google::Cloud::Bigquery::DataTransfer::V1::ListDataSourcesResponse.new + http_response = OpenStruct.new body: client_result.to_json + + call_options = {} + + # Create request parameters for a unary method. + parent = "hello world" + page_token = "hello world" + page_size = 42 + + list_data_sources_client_stub = ClientStub.new http_response do |_verb, uri:, body:, params:, options:| + assert options.metadata.key? :"x-goog-api-client" + assert options.metadata[:"x-goog-api-client"].include? "rest" + refute options.metadata[:"x-goog-api-client"].include? "grpc" + end + + ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::ServiceStub.stub :transcode_list_data_sources_request, ["", "", {}] do + Gapic::Rest::ClientStub.stub :new, list_data_sources_client_stub do + # Create client + client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::Client.new do |config| + config.credentials = :dummy_value + end + + # Use hash object + client.list_data_sources({ parent: parent, page_token: page_token, page_size: page_size }) do |_result, response| + assert_equal http_response, response.underlying_op + end + + # Use named arguments + client.list_data_sources parent: parent, page_token: page_token, page_size: page_size do |_result, response| + assert_equal http_response, response.underlying_op + end + + # Use protobuf object + client.list_data_sources ::Google::Cloud::Bigquery::DataTransfer::V1::ListDataSourcesRequest.new(parent: parent, page_token: page_token, page_size: page_size) do |_result, response| + assert_equal http_response, response.underlying_op + end + + # Use hash object with options + client.list_data_sources({ parent: parent, page_token: page_token, page_size: page_size }, call_options) do |_result, response| + assert_equal http_response, response.underlying_op + end + + # Use protobuf object with options + client.list_data_sources(::Google::Cloud::Bigquery::DataTransfer::V1::ListDataSourcesRequest.new(parent: parent, page_token: page_token, page_size: page_size), call_options) do |_result, response| + assert_equal http_response, response.underlying_op + end + + # Verify method calls + assert_equal 5, list_data_sources_client_stub.call_count + end + end + end + + def test_create_transfer_config + # Create test objects. + client_result = ::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig.new + http_response = OpenStruct.new body: client_result.to_json + + call_options = {} + + # Create request parameters for a unary method. + parent = "hello world" + transfer_config = {} + authorization_code = "hello world" + version_info = "hello world" + service_account_name = "hello world" + + create_transfer_config_client_stub = ClientStub.new http_response do |_verb, uri:, body:, params:, options:| + assert options.metadata.key? :"x-goog-api-client" + assert options.metadata[:"x-goog-api-client"].include? "rest" + refute options.metadata[:"x-goog-api-client"].include? 
"grpc" + end + + ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::ServiceStub.stub :transcode_create_transfer_config_request, ["", "", {}] do + Gapic::Rest::ClientStub.stub :new, create_transfer_config_client_stub do + # Create client + client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::Client.new do |config| + config.credentials = :dummy_value + end + + # Use hash object + client.create_transfer_config({ parent: parent, transfer_config: transfer_config, authorization_code: authorization_code, version_info: version_info, service_account_name: service_account_name }) do |_result, response| + assert_equal http_response, response.underlying_op + end + + # Use named arguments + client.create_transfer_config parent: parent, transfer_config: transfer_config, authorization_code: authorization_code, version_info: version_info, service_account_name: service_account_name do |_result, response| + assert_equal http_response, response.underlying_op + end + + # Use protobuf object + client.create_transfer_config ::Google::Cloud::Bigquery::DataTransfer::V1::CreateTransferConfigRequest.new(parent: parent, transfer_config: transfer_config, authorization_code: authorization_code, version_info: version_info, service_account_name: service_account_name) do |_result, response| + assert_equal http_response, response.underlying_op + end + + # Use hash object with options + client.create_transfer_config({ parent: parent, transfer_config: transfer_config, authorization_code: authorization_code, version_info: version_info, service_account_name: service_account_name }, call_options) do |_result, response| + assert_equal http_response, response.underlying_op + end + + # Use protobuf object with options + client.create_transfer_config(::Google::Cloud::Bigquery::DataTransfer::V1::CreateTransferConfigRequest.new(parent: parent, transfer_config: transfer_config, authorization_code: authorization_code, version_info: version_info, service_account_name: service_account_name), call_options) do |_result, response| + assert_equal http_response, response.underlying_op + end + + # Verify method calls + assert_equal 5, create_transfer_config_client_stub.call_count + end + end + end + + def test_update_transfer_config + # Create test objects. + client_result = ::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig.new + http_response = OpenStruct.new body: client_result.to_json + + call_options = {} + + # Create request parameters for a unary method. + transfer_config = {} + authorization_code = "hello world" + update_mask = {} + version_info = "hello world" + service_account_name = "hello world" + + update_transfer_config_client_stub = ClientStub.new http_response do |_verb, uri:, body:, params:, options:| + assert options.metadata.key? :"x-goog-api-client" + assert options.metadata[:"x-goog-api-client"].include? "rest" + refute options.metadata[:"x-goog-api-client"].include? 
"grpc" + end + + ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::ServiceStub.stub :transcode_update_transfer_config_request, ["", "", {}] do + Gapic::Rest::ClientStub.stub :new, update_transfer_config_client_stub do + # Create client + client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::Client.new do |config| + config.credentials = :dummy_value + end + + # Use hash object + client.update_transfer_config({ transfer_config: transfer_config, authorization_code: authorization_code, update_mask: update_mask, version_info: version_info, service_account_name: service_account_name }) do |_result, response| + assert_equal http_response, response.underlying_op + end + + # Use named arguments + client.update_transfer_config transfer_config: transfer_config, authorization_code: authorization_code, update_mask: update_mask, version_info: version_info, service_account_name: service_account_name do |_result, response| + assert_equal http_response, response.underlying_op + end + + # Use protobuf object + client.update_transfer_config ::Google::Cloud::Bigquery::DataTransfer::V1::UpdateTransferConfigRequest.new(transfer_config: transfer_config, authorization_code: authorization_code, update_mask: update_mask, version_info: version_info, service_account_name: service_account_name) do |_result, response| + assert_equal http_response, response.underlying_op + end + + # Use hash object with options + client.update_transfer_config({ transfer_config: transfer_config, authorization_code: authorization_code, update_mask: update_mask, version_info: version_info, service_account_name: service_account_name }, call_options) do |_result, response| + assert_equal http_response, response.underlying_op + end + + # Use protobuf object with options + client.update_transfer_config(::Google::Cloud::Bigquery::DataTransfer::V1::UpdateTransferConfigRequest.new(transfer_config: transfer_config, authorization_code: authorization_code, update_mask: update_mask, version_info: version_info, service_account_name: service_account_name), call_options) do |_result, response| + assert_equal http_response, response.underlying_op + end + + # Verify method calls + assert_equal 5, update_transfer_config_client_stub.call_count + end + end + end + + def test_delete_transfer_config + # Create test objects. + client_result = ::Google::Protobuf::Empty.new + http_response = OpenStruct.new body: client_result.to_json + + call_options = {} + + # Create request parameters for a unary method. + name = "hello world" + + delete_transfer_config_client_stub = ClientStub.new http_response do |_verb, uri:, body:, params:, options:| + assert options.metadata.key? :"x-goog-api-client" + assert options.metadata[:"x-goog-api-client"].include? "rest" + refute options.metadata[:"x-goog-api-client"].include? 
"grpc" + end + + ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::ServiceStub.stub :transcode_delete_transfer_config_request, ["", "", {}] do + Gapic::Rest::ClientStub.stub :new, delete_transfer_config_client_stub do + # Create client + client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::Client.new do |config| + config.credentials = :dummy_value + end + + # Use hash object + client.delete_transfer_config({ name: name }) do |_result, response| + assert_equal http_response, response.underlying_op + end + + # Use named arguments + client.delete_transfer_config name: name do |_result, response| + assert_equal http_response, response.underlying_op + end + + # Use protobuf object + client.delete_transfer_config ::Google::Cloud::Bigquery::DataTransfer::V1::DeleteTransferConfigRequest.new(name: name) do |_result, response| + assert_equal http_response, response.underlying_op + end + + # Use hash object with options + client.delete_transfer_config({ name: name }, call_options) do |_result, response| + assert_equal http_response, response.underlying_op + end + + # Use protobuf object with options + client.delete_transfer_config(::Google::Cloud::Bigquery::DataTransfer::V1::DeleteTransferConfigRequest.new(name: name), call_options) do |_result, response| + assert_equal http_response, response.underlying_op + end + + # Verify method calls + assert_equal 5, delete_transfer_config_client_stub.call_count + end + end + end + + def test_get_transfer_config + # Create test objects. + client_result = ::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig.new + http_response = OpenStruct.new body: client_result.to_json + + call_options = {} + + # Create request parameters for a unary method. + name = "hello world" + + get_transfer_config_client_stub = ClientStub.new http_response do |_verb, uri:, body:, params:, options:| + assert options.metadata.key? :"x-goog-api-client" + assert options.metadata[:"x-goog-api-client"].include? "rest" + refute options.metadata[:"x-goog-api-client"].include? 
"grpc" + end + + ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::ServiceStub.stub :transcode_get_transfer_config_request, ["", "", {}] do + Gapic::Rest::ClientStub.stub :new, get_transfer_config_client_stub do + # Create client + client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::Client.new do |config| + config.credentials = :dummy_value + end + + # Use hash object + client.get_transfer_config({ name: name }) do |_result, response| + assert_equal http_response, response.underlying_op + end + + # Use named arguments + client.get_transfer_config name: name do |_result, response| + assert_equal http_response, response.underlying_op + end + + # Use protobuf object + client.get_transfer_config ::Google::Cloud::Bigquery::DataTransfer::V1::GetTransferConfigRequest.new(name: name) do |_result, response| + assert_equal http_response, response.underlying_op + end + + # Use hash object with options + client.get_transfer_config({ name: name }, call_options) do |_result, response| + assert_equal http_response, response.underlying_op + end + + # Use protobuf object with options + client.get_transfer_config(::Google::Cloud::Bigquery::DataTransfer::V1::GetTransferConfigRequest.new(name: name), call_options) do |_result, response| + assert_equal http_response, response.underlying_op + end + + # Verify method calls + assert_equal 5, get_transfer_config_client_stub.call_count + end + end + end + + def test_list_transfer_configs + # Create test objects. + client_result = ::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferConfigsResponse.new + http_response = OpenStruct.new body: client_result.to_json + + call_options = {} + + # Create request parameters for a unary method. + parent = "hello world" + data_source_ids = ["hello world"] + page_token = "hello world" + page_size = 42 + + list_transfer_configs_client_stub = ClientStub.new http_response do |_verb, uri:, body:, params:, options:| + assert options.metadata.key? :"x-goog-api-client" + assert options.metadata[:"x-goog-api-client"].include? "rest" + refute options.metadata[:"x-goog-api-client"].include? 
"grpc" + end + + ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::ServiceStub.stub :transcode_list_transfer_configs_request, ["", "", {}] do + Gapic::Rest::ClientStub.stub :new, list_transfer_configs_client_stub do + # Create client + client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::Client.new do |config| + config.credentials = :dummy_value + end + + # Use hash object + client.list_transfer_configs({ parent: parent, data_source_ids: data_source_ids, page_token: page_token, page_size: page_size }) do |_result, response| + assert_equal http_response, response.underlying_op + end + + # Use named arguments + client.list_transfer_configs parent: parent, data_source_ids: data_source_ids, page_token: page_token, page_size: page_size do |_result, response| + assert_equal http_response, response.underlying_op + end + + # Use protobuf object + client.list_transfer_configs ::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferConfigsRequest.new(parent: parent, data_source_ids: data_source_ids, page_token: page_token, page_size: page_size) do |_result, response| + assert_equal http_response, response.underlying_op + end + + # Use hash object with options + client.list_transfer_configs({ parent: parent, data_source_ids: data_source_ids, page_token: page_token, page_size: page_size }, call_options) do |_result, response| + assert_equal http_response, response.underlying_op + end + + # Use protobuf object with options + client.list_transfer_configs(::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferConfigsRequest.new(parent: parent, data_source_ids: data_source_ids, page_token: page_token, page_size: page_size), call_options) do |_result, response| + assert_equal http_response, response.underlying_op + end + + # Verify method calls + assert_equal 5, list_transfer_configs_client_stub.call_count + end + end + end + + def test_schedule_transfer_runs + # Create test objects. + client_result = ::Google::Cloud::Bigquery::DataTransfer::V1::ScheduleTransferRunsResponse.new + http_response = OpenStruct.new body: client_result.to_json + + call_options = {} + + # Create request parameters for a unary method. + parent = "hello world" + start_time = {} + end_time = {} + + schedule_transfer_runs_client_stub = ClientStub.new http_response do |_verb, uri:, body:, params:, options:| + assert options.metadata.key? :"x-goog-api-client" + assert options.metadata[:"x-goog-api-client"].include? "rest" + refute options.metadata[:"x-goog-api-client"].include? 
"grpc" + end + + ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::ServiceStub.stub :transcode_schedule_transfer_runs_request, ["", "", {}] do + Gapic::Rest::ClientStub.stub :new, schedule_transfer_runs_client_stub do + # Create client + client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::Client.new do |config| + config.credentials = :dummy_value + end + + # Use hash object + client.schedule_transfer_runs({ parent: parent, start_time: start_time, end_time: end_time }) do |_result, response| + assert_equal http_response, response.underlying_op + end + + # Use named arguments + client.schedule_transfer_runs parent: parent, start_time: start_time, end_time: end_time do |_result, response| + assert_equal http_response, response.underlying_op + end + + # Use protobuf object + client.schedule_transfer_runs ::Google::Cloud::Bigquery::DataTransfer::V1::ScheduleTransferRunsRequest.new(parent: parent, start_time: start_time, end_time: end_time) do |_result, response| + assert_equal http_response, response.underlying_op + end + + # Use hash object with options + client.schedule_transfer_runs({ parent: parent, start_time: start_time, end_time: end_time }, call_options) do |_result, response| + assert_equal http_response, response.underlying_op + end + + # Use protobuf object with options + client.schedule_transfer_runs(::Google::Cloud::Bigquery::DataTransfer::V1::ScheduleTransferRunsRequest.new(parent: parent, start_time: start_time, end_time: end_time), call_options) do |_result, response| + assert_equal http_response, response.underlying_op + end + + # Verify method calls + assert_equal 5, schedule_transfer_runs_client_stub.call_count + end + end + end + + def test_start_manual_transfer_runs + # Create test objects. + client_result = ::Google::Cloud::Bigquery::DataTransfer::V1::StartManualTransferRunsResponse.new + http_response = OpenStruct.new body: client_result.to_json + + call_options = {} + + # Create request parameters for a unary method. + parent = "hello world" + requested_time_range = {} + + start_manual_transfer_runs_client_stub = ClientStub.new http_response do |_verb, uri:, body:, params:, options:| + assert options.metadata.key? :"x-goog-api-client" + assert options.metadata[:"x-goog-api-client"].include? "rest" + refute options.metadata[:"x-goog-api-client"].include? 
"grpc" + end + + ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::ServiceStub.stub :transcode_start_manual_transfer_runs_request, ["", "", {}] do + Gapic::Rest::ClientStub.stub :new, start_manual_transfer_runs_client_stub do + # Create client + client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::Client.new do |config| + config.credentials = :dummy_value + end + + # Use hash object + client.start_manual_transfer_runs({ parent: parent, requested_time_range: requested_time_range }) do |_result, response| + assert_equal http_response, response.underlying_op + end + + # Use named arguments + client.start_manual_transfer_runs parent: parent, requested_time_range: requested_time_range do |_result, response| + assert_equal http_response, response.underlying_op + end + + # Use protobuf object + client.start_manual_transfer_runs ::Google::Cloud::Bigquery::DataTransfer::V1::StartManualTransferRunsRequest.new(parent: parent, requested_time_range: requested_time_range) do |_result, response| + assert_equal http_response, response.underlying_op + end + + # Use hash object with options + client.start_manual_transfer_runs({ parent: parent, requested_time_range: requested_time_range }, call_options) do |_result, response| + assert_equal http_response, response.underlying_op + end + + # Use protobuf object with options + client.start_manual_transfer_runs(::Google::Cloud::Bigquery::DataTransfer::V1::StartManualTransferRunsRequest.new(parent: parent, requested_time_range: requested_time_range), call_options) do |_result, response| + assert_equal http_response, response.underlying_op + end + + # Verify method calls + assert_equal 5, start_manual_transfer_runs_client_stub.call_count + end + end + end + + def test_get_transfer_run + # Create test objects. + client_result = ::Google::Cloud::Bigquery::DataTransfer::V1::TransferRun.new + http_response = OpenStruct.new body: client_result.to_json + + call_options = {} + + # Create request parameters for a unary method. + name = "hello world" + + get_transfer_run_client_stub = ClientStub.new http_response do |_verb, uri:, body:, params:, options:| + assert options.metadata.key? :"x-goog-api-client" + assert options.metadata[:"x-goog-api-client"].include? "rest" + refute options.metadata[:"x-goog-api-client"].include? 
"grpc" + end + + ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::ServiceStub.stub :transcode_get_transfer_run_request, ["", "", {}] do + Gapic::Rest::ClientStub.stub :new, get_transfer_run_client_stub do + # Create client + client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::Client.new do |config| + config.credentials = :dummy_value + end + + # Use hash object + client.get_transfer_run({ name: name }) do |_result, response| + assert_equal http_response, response.underlying_op + end + + # Use named arguments + client.get_transfer_run name: name do |_result, response| + assert_equal http_response, response.underlying_op + end + + # Use protobuf object + client.get_transfer_run ::Google::Cloud::Bigquery::DataTransfer::V1::GetTransferRunRequest.new(name: name) do |_result, response| + assert_equal http_response, response.underlying_op + end + + # Use hash object with options + client.get_transfer_run({ name: name }, call_options) do |_result, response| + assert_equal http_response, response.underlying_op + end + + # Use protobuf object with options + client.get_transfer_run(::Google::Cloud::Bigquery::DataTransfer::V1::GetTransferRunRequest.new(name: name), call_options) do |_result, response| + assert_equal http_response, response.underlying_op + end + + # Verify method calls + assert_equal 5, get_transfer_run_client_stub.call_count + end + end + end + + def test_delete_transfer_run + # Create test objects. + client_result = ::Google::Protobuf::Empty.new + http_response = OpenStruct.new body: client_result.to_json + + call_options = {} + + # Create request parameters for a unary method. + name = "hello world" + + delete_transfer_run_client_stub = ClientStub.new http_response do |_verb, uri:, body:, params:, options:| + assert options.metadata.key? :"x-goog-api-client" + assert options.metadata[:"x-goog-api-client"].include? "rest" + refute options.metadata[:"x-goog-api-client"].include? "grpc" + end + + ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::ServiceStub.stub :transcode_delete_transfer_run_request, ["", "", {}] do + Gapic::Rest::ClientStub.stub :new, delete_transfer_run_client_stub do + # Create client + client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::Client.new do |config| + config.credentials = :dummy_value + end + + # Use hash object + client.delete_transfer_run({ name: name }) do |_result, response| + assert_equal http_response, response.underlying_op + end + + # Use named arguments + client.delete_transfer_run name: name do |_result, response| + assert_equal http_response, response.underlying_op + end + + # Use protobuf object + client.delete_transfer_run ::Google::Cloud::Bigquery::DataTransfer::V1::DeleteTransferRunRequest.new(name: name) do |_result, response| + assert_equal http_response, response.underlying_op + end + + # Use hash object with options + client.delete_transfer_run({ name: name }, call_options) do |_result, response| + assert_equal http_response, response.underlying_op + end + + # Use protobuf object with options + client.delete_transfer_run(::Google::Cloud::Bigquery::DataTransfer::V1::DeleteTransferRunRequest.new(name: name), call_options) do |_result, response| + assert_equal http_response, response.underlying_op + end + + # Verify method calls + assert_equal 5, delete_transfer_run_client_stub.call_count + end + end + end + + def test_list_transfer_runs + # Create test objects. 
+ client_result = ::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferRunsResponse.new + http_response = OpenStruct.new body: client_result.to_json + + call_options = {} + + # Create request parameters for a unary method. + parent = "hello world" + states = [:TRANSFER_STATE_UNSPECIFIED] + page_token = "hello world" + page_size = 42 + run_attempt = :RUN_ATTEMPT_UNSPECIFIED + + list_transfer_runs_client_stub = ClientStub.new http_response do |_verb, uri:, body:, params:, options:| + assert options.metadata.key? :"x-goog-api-client" + assert options.metadata[:"x-goog-api-client"].include? "rest" + refute options.metadata[:"x-goog-api-client"].include? "grpc" + end + + ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::ServiceStub.stub :transcode_list_transfer_runs_request, ["", "", {}] do + Gapic::Rest::ClientStub.stub :new, list_transfer_runs_client_stub do + # Create client + client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::Client.new do |config| + config.credentials = :dummy_value + end + + # Use hash object + client.list_transfer_runs({ parent: parent, states: states, page_token: page_token, page_size: page_size, run_attempt: run_attempt }) do |_result, response| + assert_equal http_response, response.underlying_op + end + + # Use named arguments + client.list_transfer_runs parent: parent, states: states, page_token: page_token, page_size: page_size, run_attempt: run_attempt do |_result, response| + assert_equal http_response, response.underlying_op + end + + # Use protobuf object + client.list_transfer_runs ::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferRunsRequest.new(parent: parent, states: states, page_token: page_token, page_size: page_size, run_attempt: run_attempt) do |_result, response| + assert_equal http_response, response.underlying_op + end + + # Use hash object with options + client.list_transfer_runs({ parent: parent, states: states, page_token: page_token, page_size: page_size, run_attempt: run_attempt }, call_options) do |_result, response| + assert_equal http_response, response.underlying_op + end + + # Use protobuf object with options + client.list_transfer_runs(::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferRunsRequest.new(parent: parent, states: states, page_token: page_token, page_size: page_size, run_attempt: run_attempt), call_options) do |_result, response| + assert_equal http_response, response.underlying_op + end + + # Verify method calls + assert_equal 5, list_transfer_runs_client_stub.call_count + end + end + end + + def test_list_transfer_logs + # Create test objects. + client_result = ::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferLogsResponse.new + http_response = OpenStruct.new body: client_result.to_json + + call_options = {} + + # Create request parameters for a unary method. + parent = "hello world" + page_token = "hello world" + page_size = 42 + message_types = [:MESSAGE_SEVERITY_UNSPECIFIED] + + list_transfer_logs_client_stub = ClientStub.new http_response do |_verb, uri:, body:, params:, options:| + assert options.metadata.key? :"x-goog-api-client" + assert options.metadata[:"x-goog-api-client"].include? "rest" + refute options.metadata[:"x-goog-api-client"].include? 
"grpc" + end + + ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::ServiceStub.stub :transcode_list_transfer_logs_request, ["", "", {}] do + Gapic::Rest::ClientStub.stub :new, list_transfer_logs_client_stub do + # Create client + client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::Client.new do |config| + config.credentials = :dummy_value + end + + # Use hash object + client.list_transfer_logs({ parent: parent, page_token: page_token, page_size: page_size, message_types: message_types }) do |_result, response| + assert_equal http_response, response.underlying_op + end + + # Use named arguments + client.list_transfer_logs parent: parent, page_token: page_token, page_size: page_size, message_types: message_types do |_result, response| + assert_equal http_response, response.underlying_op + end + + # Use protobuf object + client.list_transfer_logs ::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferLogsRequest.new(parent: parent, page_token: page_token, page_size: page_size, message_types: message_types) do |_result, response| + assert_equal http_response, response.underlying_op + end + + # Use hash object with options + client.list_transfer_logs({ parent: parent, page_token: page_token, page_size: page_size, message_types: message_types }, call_options) do |_result, response| + assert_equal http_response, response.underlying_op + end + + # Use protobuf object with options + client.list_transfer_logs(::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferLogsRequest.new(parent: parent, page_token: page_token, page_size: page_size, message_types: message_types), call_options) do |_result, response| + assert_equal http_response, response.underlying_op + end + + # Verify method calls + assert_equal 5, list_transfer_logs_client_stub.call_count + end + end + end + + def test_check_valid_creds + # Create test objects. + client_result = ::Google::Cloud::Bigquery::DataTransfer::V1::CheckValidCredsResponse.new + http_response = OpenStruct.new body: client_result.to_json + + call_options = {} + + # Create request parameters for a unary method. + name = "hello world" + + check_valid_creds_client_stub = ClientStub.new http_response do |_verb, uri:, body:, params:, options:| + assert options.metadata.key? :"x-goog-api-client" + assert options.metadata[:"x-goog-api-client"].include? "rest" + refute options.metadata[:"x-goog-api-client"].include? 
"grpc" + end + + ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::ServiceStub.stub :transcode_check_valid_creds_request, ["", "", {}] do + Gapic::Rest::ClientStub.stub :new, check_valid_creds_client_stub do + # Create client + client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::Client.new do |config| + config.credentials = :dummy_value + end + + # Use hash object + client.check_valid_creds({ name: name }) do |_result, response| + assert_equal http_response, response.underlying_op + end + + # Use named arguments + client.check_valid_creds name: name do |_result, response| + assert_equal http_response, response.underlying_op + end + + # Use protobuf object + client.check_valid_creds ::Google::Cloud::Bigquery::DataTransfer::V1::CheckValidCredsRequest.new(name: name) do |_result, response| + assert_equal http_response, response.underlying_op + end + + # Use hash object with options + client.check_valid_creds({ name: name }, call_options) do |_result, response| + assert_equal http_response, response.underlying_op + end + + # Use protobuf object with options + client.check_valid_creds(::Google::Cloud::Bigquery::DataTransfer::V1::CheckValidCredsRequest.new(name: name), call_options) do |_result, response| + assert_equal http_response, response.underlying_op + end + + # Verify method calls + assert_equal 5, check_valid_creds_client_stub.call_count + end + end + end + + def test_enroll_data_sources + # Create test objects. + client_result = ::Google::Protobuf::Empty.new + http_response = OpenStruct.new body: client_result.to_json + + call_options = {} + + # Create request parameters for a unary method. + name = "hello world" + data_source_ids = ["hello world"] + + enroll_data_sources_client_stub = ClientStub.new http_response do |_verb, uri:, body:, params:, options:| + assert options.metadata.key? :"x-goog-api-client" + assert options.metadata[:"x-goog-api-client"].include? "rest" + refute options.metadata[:"x-goog-api-client"].include? 
"grpc" + end + + ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::ServiceStub.stub :transcode_enroll_data_sources_request, ["", "", {}] do + Gapic::Rest::ClientStub.stub :new, enroll_data_sources_client_stub do + # Create client + client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::Client.new do |config| + config.credentials = :dummy_value + end + + # Use hash object + client.enroll_data_sources({ name: name, data_source_ids: data_source_ids }) do |_result, response| + assert_equal http_response, response.underlying_op + end + + # Use named arguments + client.enroll_data_sources name: name, data_source_ids: data_source_ids do |_result, response| + assert_equal http_response, response.underlying_op + end + + # Use protobuf object + client.enroll_data_sources ::Google::Cloud::Bigquery::DataTransfer::V1::EnrollDataSourcesRequest.new(name: name, data_source_ids: data_source_ids) do |_result, response| + assert_equal http_response, response.underlying_op + end + + # Use hash object with options + client.enroll_data_sources({ name: name, data_source_ids: data_source_ids }, call_options) do |_result, response| + assert_equal http_response, response.underlying_op + end + + # Use protobuf object with options + client.enroll_data_sources(::Google::Cloud::Bigquery::DataTransfer::V1::EnrollDataSourcesRequest.new(name: name, data_source_ids: data_source_ids), call_options) do |_result, response| + assert_equal http_response, response.underlying_op + end + + # Verify method calls + assert_equal 5, enroll_data_sources_client_stub.call_count + end + end + end + + def test_unenroll_data_sources + # Create test objects. + client_result = ::Google::Protobuf::Empty.new + http_response = OpenStruct.new body: client_result.to_json + + call_options = {} + + # Create request parameters for a unary method. + name = "hello world" + data_source_ids = ["hello world"] + + unenroll_data_sources_client_stub = ClientStub.new http_response do |_verb, uri:, body:, params:, options:| + assert options.metadata.key? :"x-goog-api-client" + assert options.metadata[:"x-goog-api-client"].include? "rest" + refute options.metadata[:"x-goog-api-client"].include? 
"grpc" + end + + ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::ServiceStub.stub :transcode_unenroll_data_sources_request, ["", "", {}] do + Gapic::Rest::ClientStub.stub :new, unenroll_data_sources_client_stub do + # Create client + client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::Client.new do |config| + config.credentials = :dummy_value + end + + # Use hash object + client.unenroll_data_sources({ name: name, data_source_ids: data_source_ids }) do |_result, response| + assert_equal http_response, response.underlying_op + end + + # Use named arguments + client.unenroll_data_sources name: name, data_source_ids: data_source_ids do |_result, response| + assert_equal http_response, response.underlying_op + end + + # Use protobuf object + client.unenroll_data_sources ::Google::Cloud::Bigquery::DataTransfer::V1::UnenrollDataSourcesRequest.new(name: name, data_source_ids: data_source_ids) do |_result, response| + assert_equal http_response, response.underlying_op + end + + # Use hash object with options + client.unenroll_data_sources({ name: name, data_source_ids: data_source_ids }, call_options) do |_result, response| + assert_equal http_response, response.underlying_op + end + + # Use protobuf object with options + client.unenroll_data_sources(::Google::Cloud::Bigquery::DataTransfer::V1::UnenrollDataSourcesRequest.new(name: name, data_source_ids: data_source_ids), call_options) do |_result, response| + assert_equal http_response, response.underlying_op + end + + # Verify method calls + assert_equal 5, unenroll_data_sources_client_stub.call_count + end + end + end + + def test_configure + credentials_token = :dummy_value + + client = block_config = config = nil + dummy_stub = ClientStub.new nil + Gapic::Rest::ClientStub.stub :new, dummy_stub do + client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::Client.new do |config| + config.credentials = credentials_token + end + end + + config = client.configure do |c| + block_config = c + end + + assert_same block_config, config + assert_kind_of ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::Client::Configuration, config + end +end diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/test/google/cloud/bigquery/data_transfer/v1/data_transfer_service_test.rb b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/test/google/cloud/bigquery/data_transfer/v1/data_transfer_service_test.rb new file mode 100644 index 000000000000..bfd435f6547c --- /dev/null +++ b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/test/google/cloud/bigquery/data_transfer/v1/data_transfer_service_test.rb @@ -0,0 +1,1075 @@ +# frozen_string_literal: true + +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Auto-generated by gapic-generator-ruby. DO NOT EDIT! 
+ +require "helper" + +require "gapic/grpc/service_stub" + +require "google/cloud/bigquery/datatransfer/v1/datatransfer_pb" +require "google/cloud/bigquery/datatransfer/v1/datatransfer_services_pb" +require "google/cloud/bigquery/data_transfer/v1/data_transfer_service" + +class ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::ClientTest < Minitest::Test + class ClientStub + attr_accessor :call_rpc_count, :requests + + def initialize response, operation, &block + @response = response + @operation = operation + @block = block + @call_rpc_count = 0 + @requests = [] + end + + def call_rpc *args, **kwargs + @call_rpc_count += 1 + + @requests << @block&.call(*args, **kwargs) + + yield @response, @operation if block_given? + + @response + end + + def endpoint + "endpoint.example.com" + end + + def universe_domain + "example.com" + end + end + + def test_get_data_source + # Create GRPC objects. + grpc_response = ::Google::Cloud::Bigquery::DataTransfer::V1::DataSource.new + grpc_operation = GRPC::ActiveCall::Operation.new nil + grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure + grpc_options = {} + + # Create request parameters for a unary method. + name = "hello world" + + get_data_source_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:| + assert_equal :get_data_source, name + assert_kind_of ::Google::Cloud::Bigquery::DataTransfer::V1::GetDataSourceRequest, request + assert_equal "hello world", request["name"] + refute_nil options + end + + Gapic::ServiceStub.stub :new, get_data_source_client_stub do + # Create client + client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new do |config| + config.credentials = grpc_channel + end + + # Use hash object + client.get_data_source({ name: name }) do |response, operation| + assert_equal grpc_response, response + assert_equal grpc_operation, operation + end + + # Use named arguments + client.get_data_source name: name do |response, operation| + assert_equal grpc_response, response + assert_equal grpc_operation, operation + end + + # Use protobuf object + client.get_data_source ::Google::Cloud::Bigquery::DataTransfer::V1::GetDataSourceRequest.new(name: name) do |response, operation| + assert_equal grpc_response, response + assert_equal grpc_operation, operation + end + + # Use hash object with options + client.get_data_source({ name: name }, grpc_options) do |response, operation| + assert_equal grpc_response, response + assert_equal grpc_operation, operation + end + + # Use protobuf object with options + client.get_data_source(::Google::Cloud::Bigquery::DataTransfer::V1::GetDataSourceRequest.new(name: name), grpc_options) do |response, operation| + assert_equal grpc_response, response + assert_equal grpc_operation, operation + end + + # Verify method calls + assert_equal 5, get_data_source_client_stub.call_rpc_count + end + end + + def test_list_data_sources + # Create GRPC objects. + grpc_response = ::Google::Cloud::Bigquery::DataTransfer::V1::ListDataSourcesResponse.new + grpc_operation = GRPC::ActiveCall::Operation.new nil + grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure + grpc_options = {} + + # Create request parameters for a unary method. 
+ parent = "hello world" + page_token = "hello world" + page_size = 42 + + list_data_sources_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:| + assert_equal :list_data_sources, name + assert_kind_of ::Google::Cloud::Bigquery::DataTransfer::V1::ListDataSourcesRequest, request + assert_equal "hello world", request["parent"] + assert_equal "hello world", request["page_token"] + assert_equal 42, request["page_size"] + refute_nil options + end + + Gapic::ServiceStub.stub :new, list_data_sources_client_stub do + # Create client + client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new do |config| + config.credentials = grpc_channel + end + + # Use hash object + client.list_data_sources({ parent: parent, page_token: page_token, page_size: page_size }) do |response, operation| + assert_kind_of Gapic::PagedEnumerable, response + assert_equal grpc_response, response.response + assert_equal grpc_operation, operation + end + + # Use named arguments + client.list_data_sources parent: parent, page_token: page_token, page_size: page_size do |response, operation| + assert_kind_of Gapic::PagedEnumerable, response + assert_equal grpc_response, response.response + assert_equal grpc_operation, operation + end + + # Use protobuf object + client.list_data_sources ::Google::Cloud::Bigquery::DataTransfer::V1::ListDataSourcesRequest.new(parent: parent, page_token: page_token, page_size: page_size) do |response, operation| + assert_kind_of Gapic::PagedEnumerable, response + assert_equal grpc_response, response.response + assert_equal grpc_operation, operation + end + + # Use hash object with options + client.list_data_sources({ parent: parent, page_token: page_token, page_size: page_size }, grpc_options) do |response, operation| + assert_kind_of Gapic::PagedEnumerable, response + assert_equal grpc_response, response.response + assert_equal grpc_operation, operation + end + + # Use protobuf object with options + client.list_data_sources(::Google::Cloud::Bigquery::DataTransfer::V1::ListDataSourcesRequest.new(parent: parent, page_token: page_token, page_size: page_size), grpc_options) do |response, operation| + assert_kind_of Gapic::PagedEnumerable, response + assert_equal grpc_response, response.response + assert_equal grpc_operation, operation + end + + # Verify method calls + assert_equal 5, list_data_sources_client_stub.call_rpc_count + end + end + + def test_create_transfer_config + # Create GRPC objects. + grpc_response = ::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig.new + grpc_operation = GRPC::ActiveCall::Operation.new nil + grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure + grpc_options = {} + + # Create request parameters for a unary method. 
+ parent = "hello world" + transfer_config = {} + authorization_code = "hello world" + version_info = "hello world" + service_account_name = "hello world" + + create_transfer_config_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:| + assert_equal :create_transfer_config, name + assert_kind_of ::Google::Cloud::Bigquery::DataTransfer::V1::CreateTransferConfigRequest, request + assert_equal "hello world", request["parent"] + assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig), request["transfer_config"] + assert_equal "hello world", request["authorization_code"] + assert_equal "hello world", request["version_info"] + assert_equal "hello world", request["service_account_name"] + refute_nil options + end + + Gapic::ServiceStub.stub :new, create_transfer_config_client_stub do + # Create client + client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new do |config| + config.credentials = grpc_channel + end + + # Use hash object + client.create_transfer_config({ parent: parent, transfer_config: transfer_config, authorization_code: authorization_code, version_info: version_info, service_account_name: service_account_name }) do |response, operation| + assert_equal grpc_response, response + assert_equal grpc_operation, operation + end + + # Use named arguments + client.create_transfer_config parent: parent, transfer_config: transfer_config, authorization_code: authorization_code, version_info: version_info, service_account_name: service_account_name do |response, operation| + assert_equal grpc_response, response + assert_equal grpc_operation, operation + end + + # Use protobuf object + client.create_transfer_config ::Google::Cloud::Bigquery::DataTransfer::V1::CreateTransferConfigRequest.new(parent: parent, transfer_config: transfer_config, authorization_code: authorization_code, version_info: version_info, service_account_name: service_account_name) do |response, operation| + assert_equal grpc_response, response + assert_equal grpc_operation, operation + end + + # Use hash object with options + client.create_transfer_config({ parent: parent, transfer_config: transfer_config, authorization_code: authorization_code, version_info: version_info, service_account_name: service_account_name }, grpc_options) do |response, operation| + assert_equal grpc_response, response + assert_equal grpc_operation, operation + end + + # Use protobuf object with options + client.create_transfer_config(::Google::Cloud::Bigquery::DataTransfer::V1::CreateTransferConfigRequest.new(parent: parent, transfer_config: transfer_config, authorization_code: authorization_code, version_info: version_info, service_account_name: service_account_name), grpc_options) do |response, operation| + assert_equal grpc_response, response + assert_equal grpc_operation, operation + end + + # Verify method calls + assert_equal 5, create_transfer_config_client_stub.call_rpc_count + end + end + + def test_update_transfer_config + # Create GRPC objects. + grpc_response = ::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig.new + grpc_operation = GRPC::ActiveCall::Operation.new nil + grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure + grpc_options = {} + + # Create request parameters for a unary method. 
+ transfer_config = {} + authorization_code = "hello world" + update_mask = {} + version_info = "hello world" + service_account_name = "hello world" + + update_transfer_config_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:| + assert_equal :update_transfer_config, name + assert_kind_of ::Google::Cloud::Bigquery::DataTransfer::V1::UpdateTransferConfigRequest, request + assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig), request["transfer_config"] + assert_equal "hello world", request["authorization_code"] + assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Protobuf::FieldMask), request["update_mask"] + assert_equal "hello world", request["version_info"] + assert_equal "hello world", request["service_account_name"] + refute_nil options + end + + Gapic::ServiceStub.stub :new, update_transfer_config_client_stub do + # Create client + client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new do |config| + config.credentials = grpc_channel + end + + # Use hash object + client.update_transfer_config({ transfer_config: transfer_config, authorization_code: authorization_code, update_mask: update_mask, version_info: version_info, service_account_name: service_account_name }) do |response, operation| + assert_equal grpc_response, response + assert_equal grpc_operation, operation + end + + # Use named arguments + client.update_transfer_config transfer_config: transfer_config, authorization_code: authorization_code, update_mask: update_mask, version_info: version_info, service_account_name: service_account_name do |response, operation| + assert_equal grpc_response, response + assert_equal grpc_operation, operation + end + + # Use protobuf object + client.update_transfer_config ::Google::Cloud::Bigquery::DataTransfer::V1::UpdateTransferConfigRequest.new(transfer_config: transfer_config, authorization_code: authorization_code, update_mask: update_mask, version_info: version_info, service_account_name: service_account_name) do |response, operation| + assert_equal grpc_response, response + assert_equal grpc_operation, operation + end + + # Use hash object with options + client.update_transfer_config({ transfer_config: transfer_config, authorization_code: authorization_code, update_mask: update_mask, version_info: version_info, service_account_name: service_account_name }, grpc_options) do |response, operation| + assert_equal grpc_response, response + assert_equal grpc_operation, operation + end + + # Use protobuf object with options + client.update_transfer_config(::Google::Cloud::Bigquery::DataTransfer::V1::UpdateTransferConfigRequest.new(transfer_config: transfer_config, authorization_code: authorization_code, update_mask: update_mask, version_info: version_info, service_account_name: service_account_name), grpc_options) do |response, operation| + assert_equal grpc_response, response + assert_equal grpc_operation, operation + end + + # Verify method calls + assert_equal 5, update_transfer_config_client_stub.call_rpc_count + end + end + + def test_delete_transfer_config + # Create GRPC objects. + grpc_response = ::Google::Protobuf::Empty.new + grpc_operation = GRPC::ActiveCall::Operation.new nil + grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure + grpc_options = {} + + # Create request parameters for a unary method. 
+ name = "hello world" + + delete_transfer_config_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:| + assert_equal :delete_transfer_config, name + assert_kind_of ::Google::Cloud::Bigquery::DataTransfer::V1::DeleteTransferConfigRequest, request + assert_equal "hello world", request["name"] + refute_nil options + end + + Gapic::ServiceStub.stub :new, delete_transfer_config_client_stub do + # Create client + client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new do |config| + config.credentials = grpc_channel + end + + # Use hash object + client.delete_transfer_config({ name: name }) do |response, operation| + assert_equal grpc_response, response + assert_equal grpc_operation, operation + end + + # Use named arguments + client.delete_transfer_config name: name do |response, operation| + assert_equal grpc_response, response + assert_equal grpc_operation, operation + end + + # Use protobuf object + client.delete_transfer_config ::Google::Cloud::Bigquery::DataTransfer::V1::DeleteTransferConfigRequest.new(name: name) do |response, operation| + assert_equal grpc_response, response + assert_equal grpc_operation, operation + end + + # Use hash object with options + client.delete_transfer_config({ name: name }, grpc_options) do |response, operation| + assert_equal grpc_response, response + assert_equal grpc_operation, operation + end + + # Use protobuf object with options + client.delete_transfer_config(::Google::Cloud::Bigquery::DataTransfer::V1::DeleteTransferConfigRequest.new(name: name), grpc_options) do |response, operation| + assert_equal grpc_response, response + assert_equal grpc_operation, operation + end + + # Verify method calls + assert_equal 5, delete_transfer_config_client_stub.call_rpc_count + end + end + + def test_get_transfer_config + # Create GRPC objects. + grpc_response = ::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig.new + grpc_operation = GRPC::ActiveCall::Operation.new nil + grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure + grpc_options = {} + + # Create request parameters for a unary method. 
+ name = "hello world" + + get_transfer_config_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:| + assert_equal :get_transfer_config, name + assert_kind_of ::Google::Cloud::Bigquery::DataTransfer::V1::GetTransferConfigRequest, request + assert_equal "hello world", request["name"] + refute_nil options + end + + Gapic::ServiceStub.stub :new, get_transfer_config_client_stub do + # Create client + client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new do |config| + config.credentials = grpc_channel + end + + # Use hash object + client.get_transfer_config({ name: name }) do |response, operation| + assert_equal grpc_response, response + assert_equal grpc_operation, operation + end + + # Use named arguments + client.get_transfer_config name: name do |response, operation| + assert_equal grpc_response, response + assert_equal grpc_operation, operation + end + + # Use protobuf object + client.get_transfer_config ::Google::Cloud::Bigquery::DataTransfer::V1::GetTransferConfigRequest.new(name: name) do |response, operation| + assert_equal grpc_response, response + assert_equal grpc_operation, operation + end + + # Use hash object with options + client.get_transfer_config({ name: name }, grpc_options) do |response, operation| + assert_equal grpc_response, response + assert_equal grpc_operation, operation + end + + # Use protobuf object with options + client.get_transfer_config(::Google::Cloud::Bigquery::DataTransfer::V1::GetTransferConfigRequest.new(name: name), grpc_options) do |response, operation| + assert_equal grpc_response, response + assert_equal grpc_operation, operation + end + + # Verify method calls + assert_equal 5, get_transfer_config_client_stub.call_rpc_count + end + end + + def test_list_transfer_configs + # Create GRPC objects. + grpc_response = ::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferConfigsResponse.new + grpc_operation = GRPC::ActiveCall::Operation.new nil + grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure + grpc_options = {} + + # Create request parameters for a unary method. 
+ parent = "hello world" + data_source_ids = ["hello world"] + page_token = "hello world" + page_size = 42 + + list_transfer_configs_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:| + assert_equal :list_transfer_configs, name + assert_kind_of ::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferConfigsRequest, request + assert_equal "hello world", request["parent"] + assert_equal ["hello world"], request["data_source_ids"] + assert_equal "hello world", request["page_token"] + assert_equal 42, request["page_size"] + refute_nil options + end + + Gapic::ServiceStub.stub :new, list_transfer_configs_client_stub do + # Create client + client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new do |config| + config.credentials = grpc_channel + end + + # Use hash object + client.list_transfer_configs({ parent: parent, data_source_ids: data_source_ids, page_token: page_token, page_size: page_size }) do |response, operation| + assert_kind_of Gapic::PagedEnumerable, response + assert_equal grpc_response, response.response + assert_equal grpc_operation, operation + end + + # Use named arguments + client.list_transfer_configs parent: parent, data_source_ids: data_source_ids, page_token: page_token, page_size: page_size do |response, operation| + assert_kind_of Gapic::PagedEnumerable, response + assert_equal grpc_response, response.response + assert_equal grpc_operation, operation + end + + # Use protobuf object + client.list_transfer_configs ::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferConfigsRequest.new(parent: parent, data_source_ids: data_source_ids, page_token: page_token, page_size: page_size) do |response, operation| + assert_kind_of Gapic::PagedEnumerable, response + assert_equal grpc_response, response.response + assert_equal grpc_operation, operation + end + + # Use hash object with options + client.list_transfer_configs({ parent: parent, data_source_ids: data_source_ids, page_token: page_token, page_size: page_size }, grpc_options) do |response, operation| + assert_kind_of Gapic::PagedEnumerable, response + assert_equal grpc_response, response.response + assert_equal grpc_operation, operation + end + + # Use protobuf object with options + client.list_transfer_configs(::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferConfigsRequest.new(parent: parent, data_source_ids: data_source_ids, page_token: page_token, page_size: page_size), grpc_options) do |response, operation| + assert_kind_of Gapic::PagedEnumerable, response + assert_equal grpc_response, response.response + assert_equal grpc_operation, operation + end + + # Verify method calls + assert_equal 5, list_transfer_configs_client_stub.call_rpc_count + end + end + + def test_schedule_transfer_runs + # Create GRPC objects. + grpc_response = ::Google::Cloud::Bigquery::DataTransfer::V1::ScheduleTransferRunsResponse.new + grpc_operation = GRPC::ActiveCall::Operation.new nil + grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure + grpc_options = {} + + # Create request parameters for a unary method. 
+ parent = "hello world" + start_time = {} + end_time = {} + + schedule_transfer_runs_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:| + assert_equal :schedule_transfer_runs, name + assert_kind_of ::Google::Cloud::Bigquery::DataTransfer::V1::ScheduleTransferRunsRequest, request + assert_equal "hello world", request["parent"] + assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Protobuf::Timestamp), request["start_time"] + assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Protobuf::Timestamp), request["end_time"] + refute_nil options + end + + Gapic::ServiceStub.stub :new, schedule_transfer_runs_client_stub do + # Create client + client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new do |config| + config.credentials = grpc_channel + end + + # Use hash object + client.schedule_transfer_runs({ parent: parent, start_time: start_time, end_time: end_time }) do |response, operation| + assert_equal grpc_response, response + assert_equal grpc_operation, operation + end + + # Use named arguments + client.schedule_transfer_runs parent: parent, start_time: start_time, end_time: end_time do |response, operation| + assert_equal grpc_response, response + assert_equal grpc_operation, operation + end + + # Use protobuf object + client.schedule_transfer_runs ::Google::Cloud::Bigquery::DataTransfer::V1::ScheduleTransferRunsRequest.new(parent: parent, start_time: start_time, end_time: end_time) do |response, operation| + assert_equal grpc_response, response + assert_equal grpc_operation, operation + end + + # Use hash object with options + client.schedule_transfer_runs({ parent: parent, start_time: start_time, end_time: end_time }, grpc_options) do |response, operation| + assert_equal grpc_response, response + assert_equal grpc_operation, operation + end + + # Use protobuf object with options + client.schedule_transfer_runs(::Google::Cloud::Bigquery::DataTransfer::V1::ScheduleTransferRunsRequest.new(parent: parent, start_time: start_time, end_time: end_time), grpc_options) do |response, operation| + assert_equal grpc_response, response + assert_equal grpc_operation, operation + end + + # Verify method calls + assert_equal 5, schedule_transfer_runs_client_stub.call_rpc_count + end + end + + def test_start_manual_transfer_runs + # Create GRPC objects. + grpc_response = ::Google::Cloud::Bigquery::DataTransfer::V1::StartManualTransferRunsResponse.new + grpc_operation = GRPC::ActiveCall::Operation.new nil + grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure + grpc_options = {} + + # Create request parameters for a unary method. 
+ parent = "hello world" + requested_time_range = {} + + start_manual_transfer_runs_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:| + assert_equal :start_manual_transfer_runs, name + assert_kind_of ::Google::Cloud::Bigquery::DataTransfer::V1::StartManualTransferRunsRequest, request + assert_equal "hello world", request["parent"] + assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Cloud::Bigquery::DataTransfer::V1::StartManualTransferRunsRequest::TimeRange), request["requested_time_range"] + assert_equal :requested_time_range, request.time + refute_nil options + end + + Gapic::ServiceStub.stub :new, start_manual_transfer_runs_client_stub do + # Create client + client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new do |config| + config.credentials = grpc_channel + end + + # Use hash object + client.start_manual_transfer_runs({ parent: parent, requested_time_range: requested_time_range }) do |response, operation| + assert_equal grpc_response, response + assert_equal grpc_operation, operation + end + + # Use named arguments + client.start_manual_transfer_runs parent: parent, requested_time_range: requested_time_range do |response, operation| + assert_equal grpc_response, response + assert_equal grpc_operation, operation + end + + # Use protobuf object + client.start_manual_transfer_runs ::Google::Cloud::Bigquery::DataTransfer::V1::StartManualTransferRunsRequest.new(parent: parent, requested_time_range: requested_time_range) do |response, operation| + assert_equal grpc_response, response + assert_equal grpc_operation, operation + end + + # Use hash object with options + client.start_manual_transfer_runs({ parent: parent, requested_time_range: requested_time_range }, grpc_options) do |response, operation| + assert_equal grpc_response, response + assert_equal grpc_operation, operation + end + + # Use protobuf object with options + client.start_manual_transfer_runs(::Google::Cloud::Bigquery::DataTransfer::V1::StartManualTransferRunsRequest.new(parent: parent, requested_time_range: requested_time_range), grpc_options) do |response, operation| + assert_equal grpc_response, response + assert_equal grpc_operation, operation + end + + # Verify method calls + assert_equal 5, start_manual_transfer_runs_client_stub.call_rpc_count + end + end + + def test_get_transfer_run + # Create GRPC objects. + grpc_response = ::Google::Cloud::Bigquery::DataTransfer::V1::TransferRun.new + grpc_operation = GRPC::ActiveCall::Operation.new nil + grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure + grpc_options = {} + + # Create request parameters for a unary method. 
+ name = "hello world" + + get_transfer_run_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:| + assert_equal :get_transfer_run, name + assert_kind_of ::Google::Cloud::Bigquery::DataTransfer::V1::GetTransferRunRequest, request + assert_equal "hello world", request["name"] + refute_nil options + end + + Gapic::ServiceStub.stub :new, get_transfer_run_client_stub do + # Create client + client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new do |config| + config.credentials = grpc_channel + end + + # Use hash object + client.get_transfer_run({ name: name }) do |response, operation| + assert_equal grpc_response, response + assert_equal grpc_operation, operation + end + + # Use named arguments + client.get_transfer_run name: name do |response, operation| + assert_equal grpc_response, response + assert_equal grpc_operation, operation + end + + # Use protobuf object + client.get_transfer_run ::Google::Cloud::Bigquery::DataTransfer::V1::GetTransferRunRequest.new(name: name) do |response, operation| + assert_equal grpc_response, response + assert_equal grpc_operation, operation + end + + # Use hash object with options + client.get_transfer_run({ name: name }, grpc_options) do |response, operation| + assert_equal grpc_response, response + assert_equal grpc_operation, operation + end + + # Use protobuf object with options + client.get_transfer_run(::Google::Cloud::Bigquery::DataTransfer::V1::GetTransferRunRequest.new(name: name), grpc_options) do |response, operation| + assert_equal grpc_response, response + assert_equal grpc_operation, operation + end + + # Verify method calls + assert_equal 5, get_transfer_run_client_stub.call_rpc_count + end + end + + def test_delete_transfer_run + # Create GRPC objects. + grpc_response = ::Google::Protobuf::Empty.new + grpc_operation = GRPC::ActiveCall::Operation.new nil + grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure + grpc_options = {} + + # Create request parameters for a unary method. 
+ name = "hello world" + + delete_transfer_run_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:| + assert_equal :delete_transfer_run, name + assert_kind_of ::Google::Cloud::Bigquery::DataTransfer::V1::DeleteTransferRunRequest, request + assert_equal "hello world", request["name"] + refute_nil options + end + + Gapic::ServiceStub.stub :new, delete_transfer_run_client_stub do + # Create client + client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new do |config| + config.credentials = grpc_channel + end + + # Use hash object + client.delete_transfer_run({ name: name }) do |response, operation| + assert_equal grpc_response, response + assert_equal grpc_operation, operation + end + + # Use named arguments + client.delete_transfer_run name: name do |response, operation| + assert_equal grpc_response, response + assert_equal grpc_operation, operation + end + + # Use protobuf object + client.delete_transfer_run ::Google::Cloud::Bigquery::DataTransfer::V1::DeleteTransferRunRequest.new(name: name) do |response, operation| + assert_equal grpc_response, response + assert_equal grpc_operation, operation + end + + # Use hash object with options + client.delete_transfer_run({ name: name }, grpc_options) do |response, operation| + assert_equal grpc_response, response + assert_equal grpc_operation, operation + end + + # Use protobuf object with options + client.delete_transfer_run(::Google::Cloud::Bigquery::DataTransfer::V1::DeleteTransferRunRequest.new(name: name), grpc_options) do |response, operation| + assert_equal grpc_response, response + assert_equal grpc_operation, operation + end + + # Verify method calls + assert_equal 5, delete_transfer_run_client_stub.call_rpc_count + end + end + + def test_list_transfer_runs + # Create GRPC objects. + grpc_response = ::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferRunsResponse.new + grpc_operation = GRPC::ActiveCall::Operation.new nil + grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure + grpc_options = {} + + # Create request parameters for a unary method. 
+ parent = "hello world" + states = [:TRANSFER_STATE_UNSPECIFIED] + page_token = "hello world" + page_size = 42 + run_attempt = :RUN_ATTEMPT_UNSPECIFIED + + list_transfer_runs_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:| + assert_equal :list_transfer_runs, name + assert_kind_of ::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferRunsRequest, request + assert_equal "hello world", request["parent"] + assert_equal [:TRANSFER_STATE_UNSPECIFIED], request["states"] + assert_equal "hello world", request["page_token"] + assert_equal 42, request["page_size"] + assert_equal :RUN_ATTEMPT_UNSPECIFIED, request["run_attempt"] + refute_nil options + end + + Gapic::ServiceStub.stub :new, list_transfer_runs_client_stub do + # Create client + client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new do |config| + config.credentials = grpc_channel + end + + # Use hash object + client.list_transfer_runs({ parent: parent, states: states, page_token: page_token, page_size: page_size, run_attempt: run_attempt }) do |response, operation| + assert_kind_of Gapic::PagedEnumerable, response + assert_equal grpc_response, response.response + assert_equal grpc_operation, operation + end + + # Use named arguments + client.list_transfer_runs parent: parent, states: states, page_token: page_token, page_size: page_size, run_attempt: run_attempt do |response, operation| + assert_kind_of Gapic::PagedEnumerable, response + assert_equal grpc_response, response.response + assert_equal grpc_operation, operation + end + + # Use protobuf object + client.list_transfer_runs ::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferRunsRequest.new(parent: parent, states: states, page_token: page_token, page_size: page_size, run_attempt: run_attempt) do |response, operation| + assert_kind_of Gapic::PagedEnumerable, response + assert_equal grpc_response, response.response + assert_equal grpc_operation, operation + end + + # Use hash object with options + client.list_transfer_runs({ parent: parent, states: states, page_token: page_token, page_size: page_size, run_attempt: run_attempt }, grpc_options) do |response, operation| + assert_kind_of Gapic::PagedEnumerable, response + assert_equal grpc_response, response.response + assert_equal grpc_operation, operation + end + + # Use protobuf object with options + client.list_transfer_runs(::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferRunsRequest.new(parent: parent, states: states, page_token: page_token, page_size: page_size, run_attempt: run_attempt), grpc_options) do |response, operation| + assert_kind_of Gapic::PagedEnumerable, response + assert_equal grpc_response, response.response + assert_equal grpc_operation, operation + end + + # Verify method calls + assert_equal 5, list_transfer_runs_client_stub.call_rpc_count + end + end + + def test_list_transfer_logs + # Create GRPC objects. + grpc_response = ::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferLogsResponse.new + grpc_operation = GRPC::ActiveCall::Operation.new nil + grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure + grpc_options = {} + + # Create request parameters for a unary method. 
+ parent = "hello world" + page_token = "hello world" + page_size = 42 + message_types = [:MESSAGE_SEVERITY_UNSPECIFIED] + + list_transfer_logs_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:| + assert_equal :list_transfer_logs, name + assert_kind_of ::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferLogsRequest, request + assert_equal "hello world", request["parent"] + assert_equal "hello world", request["page_token"] + assert_equal 42, request["page_size"] + assert_equal [:MESSAGE_SEVERITY_UNSPECIFIED], request["message_types"] + refute_nil options + end + + Gapic::ServiceStub.stub :new, list_transfer_logs_client_stub do + # Create client + client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new do |config| + config.credentials = grpc_channel + end + + # Use hash object + client.list_transfer_logs({ parent: parent, page_token: page_token, page_size: page_size, message_types: message_types }) do |response, operation| + assert_kind_of Gapic::PagedEnumerable, response + assert_equal grpc_response, response.response + assert_equal grpc_operation, operation + end + + # Use named arguments + client.list_transfer_logs parent: parent, page_token: page_token, page_size: page_size, message_types: message_types do |response, operation| + assert_kind_of Gapic::PagedEnumerable, response + assert_equal grpc_response, response.response + assert_equal grpc_operation, operation + end + + # Use protobuf object + client.list_transfer_logs ::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferLogsRequest.new(parent: parent, page_token: page_token, page_size: page_size, message_types: message_types) do |response, operation| + assert_kind_of Gapic::PagedEnumerable, response + assert_equal grpc_response, response.response + assert_equal grpc_operation, operation + end + + # Use hash object with options + client.list_transfer_logs({ parent: parent, page_token: page_token, page_size: page_size, message_types: message_types }, grpc_options) do |response, operation| + assert_kind_of Gapic::PagedEnumerable, response + assert_equal grpc_response, response.response + assert_equal grpc_operation, operation + end + + # Use protobuf object with options + client.list_transfer_logs(::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferLogsRequest.new(parent: parent, page_token: page_token, page_size: page_size, message_types: message_types), grpc_options) do |response, operation| + assert_kind_of Gapic::PagedEnumerable, response + assert_equal grpc_response, response.response + assert_equal grpc_operation, operation + end + + # Verify method calls + assert_equal 5, list_transfer_logs_client_stub.call_rpc_count + end + end + + def test_check_valid_creds + # Create GRPC objects. + grpc_response = ::Google::Cloud::Bigquery::DataTransfer::V1::CheckValidCredsResponse.new + grpc_operation = GRPC::ActiveCall::Operation.new nil + grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure + grpc_options = {} + + # Create request parameters for a unary method. 
+ name = "hello world" + + check_valid_creds_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:| + assert_equal :check_valid_creds, name + assert_kind_of ::Google::Cloud::Bigquery::DataTransfer::V1::CheckValidCredsRequest, request + assert_equal "hello world", request["name"] + refute_nil options + end + + Gapic::ServiceStub.stub :new, check_valid_creds_client_stub do + # Create client + client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new do |config| + config.credentials = grpc_channel + end + + # Use hash object + client.check_valid_creds({ name: name }) do |response, operation| + assert_equal grpc_response, response + assert_equal grpc_operation, operation + end + + # Use named arguments + client.check_valid_creds name: name do |response, operation| + assert_equal grpc_response, response + assert_equal grpc_operation, operation + end + + # Use protobuf object + client.check_valid_creds ::Google::Cloud::Bigquery::DataTransfer::V1::CheckValidCredsRequest.new(name: name) do |response, operation| + assert_equal grpc_response, response + assert_equal grpc_operation, operation + end + + # Use hash object with options + client.check_valid_creds({ name: name }, grpc_options) do |response, operation| + assert_equal grpc_response, response + assert_equal grpc_operation, operation + end + + # Use protobuf object with options + client.check_valid_creds(::Google::Cloud::Bigquery::DataTransfer::V1::CheckValidCredsRequest.new(name: name), grpc_options) do |response, operation| + assert_equal grpc_response, response + assert_equal grpc_operation, operation + end + + # Verify method calls + assert_equal 5, check_valid_creds_client_stub.call_rpc_count + end + end + + def test_enroll_data_sources + # Create GRPC objects. + grpc_response = ::Google::Protobuf::Empty.new + grpc_operation = GRPC::ActiveCall::Operation.new nil + grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure + grpc_options = {} + + # Create request parameters for a unary method. 
+ name = "hello world" + data_source_ids = ["hello world"] + + enroll_data_sources_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:| + assert_equal :enroll_data_sources, name + assert_kind_of ::Google::Cloud::Bigquery::DataTransfer::V1::EnrollDataSourcesRequest, request + assert_equal "hello world", request["name"] + assert_equal ["hello world"], request["data_source_ids"] + refute_nil options + end + + Gapic::ServiceStub.stub :new, enroll_data_sources_client_stub do + # Create client + client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new do |config| + config.credentials = grpc_channel + end + + # Use hash object + client.enroll_data_sources({ name: name, data_source_ids: data_source_ids }) do |response, operation| + assert_equal grpc_response, response + assert_equal grpc_operation, operation + end + + # Use named arguments + client.enroll_data_sources name: name, data_source_ids: data_source_ids do |response, operation| + assert_equal grpc_response, response + assert_equal grpc_operation, operation + end + + # Use protobuf object + client.enroll_data_sources ::Google::Cloud::Bigquery::DataTransfer::V1::EnrollDataSourcesRequest.new(name: name, data_source_ids: data_source_ids) do |response, operation| + assert_equal grpc_response, response + assert_equal grpc_operation, operation + end + + # Use hash object with options + client.enroll_data_sources({ name: name, data_source_ids: data_source_ids }, grpc_options) do |response, operation| + assert_equal grpc_response, response + assert_equal grpc_operation, operation + end + + # Use protobuf object with options + client.enroll_data_sources(::Google::Cloud::Bigquery::DataTransfer::V1::EnrollDataSourcesRequest.new(name: name, data_source_ids: data_source_ids), grpc_options) do |response, operation| + assert_equal grpc_response, response + assert_equal grpc_operation, operation + end + + # Verify method calls + assert_equal 5, enroll_data_sources_client_stub.call_rpc_count + end + end + + def test_unenroll_data_sources + # Create GRPC objects. + grpc_response = ::Google::Protobuf::Empty.new + grpc_operation = GRPC::ActiveCall::Operation.new nil + grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure + grpc_options = {} + + # Create request parameters for a unary method. 
+ name = "hello world" + data_source_ids = ["hello world"] + + unenroll_data_sources_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:| + assert_equal :unenroll_data_sources, name + assert_kind_of ::Google::Cloud::Bigquery::DataTransfer::V1::UnenrollDataSourcesRequest, request + assert_equal "hello world", request["name"] + assert_equal ["hello world"], request["data_source_ids"] + refute_nil options + end + + Gapic::ServiceStub.stub :new, unenroll_data_sources_client_stub do + # Create client + client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new do |config| + config.credentials = grpc_channel + end + + # Use hash object + client.unenroll_data_sources({ name: name, data_source_ids: data_source_ids }) do |response, operation| + assert_equal grpc_response, response + assert_equal grpc_operation, operation + end + + # Use named arguments + client.unenroll_data_sources name: name, data_source_ids: data_source_ids do |response, operation| + assert_equal grpc_response, response + assert_equal grpc_operation, operation + end + + # Use protobuf object + client.unenroll_data_sources ::Google::Cloud::Bigquery::DataTransfer::V1::UnenrollDataSourcesRequest.new(name: name, data_source_ids: data_source_ids) do |response, operation| + assert_equal grpc_response, response + assert_equal grpc_operation, operation + end + + # Use hash object with options + client.unenroll_data_sources({ name: name, data_source_ids: data_source_ids }, grpc_options) do |response, operation| + assert_equal grpc_response, response + assert_equal grpc_operation, operation + end + + # Use protobuf object with options + client.unenroll_data_sources(::Google::Cloud::Bigquery::DataTransfer::V1::UnenrollDataSourcesRequest.new(name: name, data_source_ids: data_source_ids), grpc_options) do |response, operation| + assert_equal grpc_response, response + assert_equal grpc_operation, operation + end + + # Verify method calls + assert_equal 5, unenroll_data_sources_client_stub.call_rpc_count + end + end + + def test_configure + grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure + + client = block_config = config = nil + dummy_stub = ClientStub.new nil, nil + Gapic::ServiceStub.stub :new, dummy_stub do + client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new do |config| + config.credentials = grpc_channel + end + end + + config = client.configure do |c| + block_config = c + end + + assert_same block_config, config + assert_kind_of ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client::Configuration, config + end +end diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/test/helper.rb b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/test/helper.rb new file mode 100644 index 000000000000..48407bca7edb --- /dev/null +++ b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/test/helper.rb @@ -0,0 +1,25 @@ +# frozen_string_literal: true + +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +# Auto-generated by gapic-generator-ruby. DO NOT EDIT! + +require "minitest/autorun" +require "minitest/focus" +require "minitest/rg" + +require "grpc" + +require "ostruct" From 6b6aeb96651d602838867c1dfac589700932778e Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Mon, 30 Sep 2024 18:57:07 +0000 Subject: [PATCH 2/2] =?UTF-8?q?=F0=9F=A6=89=20Updates=20from=20OwlBot=20po?= =?UTF-8?q?st-processor?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --- .../bigquery/datatransfer/v1/transfer_pb.rb | 8 +- .../bigquery/datatransfer/v1/transfer.rb | 82 + .../.gitignore | 22 - .../.repo-metadata.json | 18 - .../.rubocop.yml | 33 - .../.toys.rb | 28 - .../.yardopts | 12 - .../AUTHENTICATION.md | 122 - .../CHANGELOG.md | 2 - .../Gemfile | 11 - .../LICENSE.md | 201 -- .../README.md | 144 -- .../Rakefile | 168 -- .../gapic_metadata.json | 98 - ...le-cloud-bigquery-data_transfer-v1.gemspec | 29 - .../google-cloud-bigquery-data_transfer-v1.rb | 21 - .../google/cloud/bigquery/data_transfer/v1.rb | 47 - .../data_transfer/v1/bindings_override.rb | 104 - .../data_transfer/v1/data_transfer_service.rb | 57 - .../v1/data_transfer_service/client.rb | 2127 ----------------- .../v1/data_transfer_service/credentials.rb | 53 - .../v1/data_transfer_service/paths.rb | 193 -- .../v1/data_transfer_service/rest.rb | 55 - .../v1/data_transfer_service/rest/client.rb | 1995 ---------------- .../rest/service_stub.rb | 1133 --------- .../cloud/bigquery/data_transfer/v1/rest.rb | 40 - .../bigquery/data_transfer/v1/version.rb | 30 - .../datatransfer/v1/datatransfer_pb.rb | 89 - .../v1/datatransfer_services_pb.rb | 99 - .../bigquery/datatransfer/v1/transfer_pb.rb | 68 - .../proto_docs/README.md | 4 - .../proto_docs/google/api/client.rb | 420 ---- .../proto_docs/google/api/field_behavior.rb | 85 - .../proto_docs/google/api/launch_stage.rb | 71 - .../proto_docs/google/api/resource.rb | 227 -- .../bigquery/datatransfer/v1/datatransfer.rb | 722 ------ .../bigquery/datatransfer/v1/transfer.rb | 405 ---- .../proto_docs/google/protobuf/any.rb | 145 -- .../proto_docs/google/protobuf/duration.rb | 98 - .../proto_docs/google/protobuf/empty.rb | 34 - .../proto_docs/google/protobuf/field_mask.rb | 229 -- .../proto_docs/google/protobuf/struct.rb | 96 - .../proto_docs/google/protobuf/timestamp.rb | 127 - .../proto_docs/google/protobuf/wrappers.rb | 121 - .../proto_docs/google/rpc/status.rb | 48 - .../snippets/Gemfile | 32 - .../check_valid_creds.rb | 47 - .../create_transfer_config.rb | 47 - .../delete_transfer_config.rb | 47 - .../delete_transfer_run.rb | 47 - .../enroll_data_sources.rb | 47 - .../data_transfer_service/get_data_source.rb | 47 - .../get_transfer_config.rb | 47 - .../data_transfer_service/get_transfer_run.rb | 47 - .../list_data_sources.rb | 51 - .../list_transfer_configs.rb | 51 - .../list_transfer_logs.rb | 51 - .../list_transfer_runs.rb | 51 - .../schedule_transfer_runs.rb | 47 - .../start_manual_transfer_runs.rb | 47 - .../unenroll_data_sources.rb | 47 - .../update_transfer_config.rb | 47 - ...google.cloud.bigquery.datatransfer.v1.json | 655 ----- .../v1/data_transfer_service_paths_test.rb | 104 - .../v1/data_transfer_service_rest_test.rb | 980 -------- .../v1/data_transfer_service_test.rb | 1075 --------- .../test/helper.rb | 25 - 67 files changed, 88 insertions(+), 13472 deletions(-) 
delete mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/.gitignore delete mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/.repo-metadata.json delete mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/.rubocop.yml delete mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/.toys.rb delete mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/.yardopts delete mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/AUTHENTICATION.md delete mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/CHANGELOG.md delete mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/Gemfile delete mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/LICENSE.md delete mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/README.md delete mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/Rakefile delete mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/gapic_metadata.json delete mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/google-cloud-bigquery-data_transfer-v1.gemspec delete mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google-cloud-bigquery-data_transfer-v1.rb delete mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/data_transfer/v1.rb delete mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/data_transfer/v1/bindings_override.rb delete mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/data_transfer/v1/data_transfer_service.rb delete mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/data_transfer/v1/data_transfer_service/client.rb delete mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/data_transfer/v1/data_transfer_service/credentials.rb delete mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/data_transfer/v1/data_transfer_service/paths.rb delete mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/data_transfer/v1/data_transfer_service/rest.rb delete mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/data_transfer/v1/data_transfer_service/rest/client.rb delete mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/data_transfer/v1/data_transfer_service/rest/service_stub.rb delete mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/data_transfer/v1/rest.rb delete mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/data_transfer/v1/version.rb delete mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/datatransfer/v1/datatransfer_pb.rb delete mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/datatransfer/v1/datatransfer_services_pb.rb delete mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/datatransfer/v1/transfer_pb.rb delete mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/README.md delete mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/api/client.rb delete mode 100644 
owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/api/field_behavior.rb delete mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/api/launch_stage.rb delete mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/api/resource.rb delete mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/cloud/bigquery/datatransfer/v1/datatransfer.rb delete mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/cloud/bigquery/datatransfer/v1/transfer.rb delete mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/protobuf/any.rb delete mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/protobuf/duration.rb delete mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/protobuf/empty.rb delete mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/protobuf/field_mask.rb delete mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/protobuf/struct.rb delete mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/protobuf/timestamp.rb delete mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/protobuf/wrappers.rb delete mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/rpc/status.rb delete mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/Gemfile delete mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/check_valid_creds.rb delete mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/create_transfer_config.rb delete mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/delete_transfer_config.rb delete mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/delete_transfer_run.rb delete mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/enroll_data_sources.rb delete mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/get_data_source.rb delete mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/get_transfer_config.rb delete mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/get_transfer_run.rb delete mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/list_data_sources.rb delete mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/list_transfer_configs.rb delete mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/list_transfer_logs.rb delete mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/list_transfer_runs.rb delete mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/schedule_transfer_runs.rb delete mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/start_manual_transfer_runs.rb delete mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/unenroll_data_sources.rb delete mode 100644 
owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/update_transfer_config.rb delete mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/snippet_metadata_google.cloud.bigquery.datatransfer.v1.json delete mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/test/google/cloud/bigquery/data_transfer/v1/data_transfer_service_paths_test.rb delete mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/test/google/cloud/bigquery/data_transfer/v1/data_transfer_service_rest_test.rb delete mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/test/google/cloud/bigquery/data_transfer/v1/data_transfer_service_test.rb delete mode 100644 owl-bot-staging/google-cloud-bigquery-data_transfer-v1/test/helper.rb diff --git a/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/datatransfer/v1/transfer_pb.rb b/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/datatransfer/v1/transfer_pb.rb index c11f80344b30..2572e94d32a4 100644 --- a/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/datatransfer/v1/transfer_pb.rb +++ b/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/datatransfer/v1/transfer_pb.rb @@ -12,7 +12,7 @@ require 'google/rpc/status_pb' -descriptor_data = "\n4google/cloud/bigquery/datatransfer/v1/transfer.proto\x12%google.cloud.bigquery.datatransfer.v1\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/wrappers.proto\x1a\x17google/rpc/status.proto\"0\n\x10\x45mailPreferences\x12\x1c\n\x14\x65nable_failure_email\x18\x01 \x01(\x08\"\x90\x01\n\x0fScheduleOptions\x12\x1f\n\x17\x64isable_auto_scheduling\x18\x03 \x01(\x08\x12.\n\nstart_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"(\n\x08UserInfo\x12\x12\n\x05\x65mail\x18\x01 \x01(\tH\x00\x88\x01\x01\x42\x08\n\x06_email\"\x9c\x08\n\x0eTransferConfig\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x08\x12 \n\x16\x64\x65stination_dataset_id\x18\x02 \x01(\tH\x00\x12\x14\n\x0c\x64isplay_name\x18\x03 \x01(\t\x12\x16\n\x0e\x64\x61ta_source_id\x18\x05 \x01(\t\x12\'\n\x06params\x18\t \x01(\x0b\x32\x17.google.protobuf.Struct\x12\x10\n\x08schedule\x18\x07 \x01(\t\x12P\n\x10schedule_options\x18\x18 \x01(\x0b\x32\x36.google.cloud.bigquery.datatransfer.v1.ScheduleOptions\x12 \n\x18\x64\x61ta_refresh_window_days\x18\x0c \x01(\x05\x12\x10\n\x08\x64isabled\x18\r \x01(\x08\x12\x34\n\x0bupdate_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12\x36\n\rnext_run_time\x18\x08 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12H\n\x05state\x18\n \x01(\x0e\x32\x34.google.cloud.bigquery.datatransfer.v1.TransferStateB\x03\xe0\x41\x03\x12\x0f\n\x07user_id\x18\x0b \x01(\x03\x12\x1b\n\x0e\x64\x61taset_region\x18\x0e \x01(\tB\x03\xe0\x41\x03\x12!\n\x19notification_pubsub_topic\x18\x0f \x01(\t\x12R\n\x11\x65mail_preferences\x18\x12 \x01(\x0b\x32\x37.google.cloud.bigquery.datatransfer.v1.EmailPreferences\x12M\n\nowner_info\x18\x1b \x01(\x0b\x32/.google.cloud.bigquery.datatransfer.v1.UserInfoB\x03\xe0\x41\x03H\x01\x88\x01\x01\x12`\n\x18\x65ncryption_configuration\x18\x1c 
\x01(\x0b\x32>.google.cloud.bigquery.datatransfer.v1.EncryptionConfiguration:\xb9\x01\xea\x41\xb5\x01\n2bigquerydatatransfer.googleapis.com/TransferConfig\x12\x34projects/{project}/transferConfigs/{transfer_config}\x12Iprojects/{project}/locations/{location}/transferConfigs/{transfer_config}B\r\n\x0b\x64\x65stinationB\r\n\x0b_owner_info\"M\n\x17\x45ncryptionConfiguration\x12\x32\n\x0ckms_key_name\x18\x01 \x01(\x0b\x32\x1c.google.protobuf.StringValue\"\xff\x06\n\x0bTransferRun\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x08\x12\x31\n\rschedule_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08run_time\x18\n \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12(\n\x0c\x65rror_status\x18\x15 \x01(\x0b\x32\x12.google.rpc.Status\x12\x33\n\nstart_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12\x31\n\x08\x65nd_time\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12\x34\n\x0bupdate_time\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12,\n\x06params\x18\t \x01(\x0b\x32\x17.google.protobuf.StructB\x03\xe0\x41\x03\x12%\n\x16\x64\x65stination_dataset_id\x18\x02 \x01(\tB\x03\xe0\x41\x03H\x00\x12\x1b\n\x0e\x64\x61ta_source_id\x18\x07 \x01(\tB\x03\xe0\x41\x03\x12\x43\n\x05state\x18\x08 \x01(\x0e\x32\x34.google.cloud.bigquery.datatransfer.v1.TransferState\x12\x0f\n\x07user_id\x18\x0b \x01(\x03\x12\x15\n\x08schedule\x18\x0c \x01(\tB\x03\xe0\x41\x03\x12&\n\x19notification_pubsub_topic\x18\x17 \x01(\tB\x03\xe0\x41\x03\x12W\n\x11\x65mail_preferences\x18\x19 \x01(\x0b\x32\x37.google.cloud.bigquery.datatransfer.v1.EmailPreferencesB\x03\xe0\x41\x03:\xc4\x01\xea\x41\xc0\x01\n\'bigquerydatatransfer.googleapis.com/Run\x12?projects/{project}/transferConfigs/{transfer_config}/runs/{run}\x12Tprojects/{project}/locations/{location}/transferConfigs/{transfer_config}/runs/{run}B\r\n\x0b\x64\x65stination\"\x8a\x02\n\x0fTransferMessage\x12\x30\n\x0cmessage_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12X\n\x08severity\x18\x02 \x01(\x0e\x32\x46.google.cloud.bigquery.datatransfer.v1.TransferMessage.MessageSeverity\x12\x14\n\x0cmessage_text\x18\x03 \x01(\t\"U\n\x0fMessageSeverity\x12 \n\x1cMESSAGE_SEVERITY_UNSPECIFIED\x10\x00\x12\x08\n\x04INFO\x10\x01\x12\x0b\n\x07WARNING\x10\x02\x12\t\n\x05\x45RROR\x10\x03*K\n\x0cTransferType\x12\x1d\n\x19TRANSFER_TYPE_UNSPECIFIED\x10\x00\x12\t\n\x05\x42\x41TCH\x10\x01\x12\r\n\tSTREAMING\x10\x02\x1a\x02\x18\x01*s\n\rTransferState\x12\x1e\n\x1aTRANSFER_STATE_UNSPECIFIED\x10\x00\x12\x0b\n\x07PENDING\x10\x02\x12\x0b\n\x07RUNNING\x10\x03\x12\r\n\tSUCCEEDED\x10\x04\x12\n\n\x06\x46\x41ILED\x10\x05\x12\r\n\tCANCELLED\x10\x06\x42\x8f\x02\n)com.google.cloud.bigquery.datatransfer.v1B\rTransferProtoP\x01ZMcloud.google.com/go/bigquery/datatransfer/apiv1/datatransferpb;datatransferpb\xa2\x02\x05GCBDT\xaa\x02%Google.Cloud.BigQuery.DataTransfer.V1\xca\x02%Google\\Cloud\\BigQuery\\DataTransfer\\V1\xea\x02)Google::Cloud::Bigquery::DataTransfer::V1b\x06proto3" +descriptor_data = "\n4google/cloud/bigquery/datatransfer/v1/transfer.proto\x12%google.cloud.bigquery.datatransfer.v1\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/wrappers.proto\x1a\x17google/rpc/status.proto\"0\n\x10\x45mailPreferences\x12\x1c\n\x14\x65nable_failure_email\x18\x01 \x01(\x08\"\x90\x01\n\x0fScheduleOptions\x12\x1f\n\x17\x64isable_auto_scheduling\x18\x03 \x01(\x08\x12.\n\nstart_time\x18\x01 
\x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"\xa7\x02\n\x11ScheduleOptionsV2\x12W\n\x13time_based_schedule\x18\x01 \x01(\x0b\x32\x38.google.cloud.bigquery.datatransfer.v1.TimeBasedScheduleH\x00\x12P\n\x0fmanual_schedule\x18\x02 \x01(\x0b\x32\x35.google.cloud.bigquery.datatransfer.v1.ManualScheduleH\x00\x12[\n\x15\x65vent_driven_schedule\x18\x03 \x01(\x0b\x32:.google.cloud.bigquery.datatransfer.v1.EventDrivenScheduleH\x00\x42\n\n\x08schedule\"\x83\x01\n\x11TimeBasedSchedule\x12\x10\n\x08schedule\x18\x01 \x01(\t\x12.\n\nstart_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"\x10\n\x0eManualSchedule\"2\n\x13\x45ventDrivenSchedule\x12\x1b\n\x13pubsub_subscription\x18\x01 \x01(\t\"(\n\x08UserInfo\x12\x12\n\x05\x65mail\x18\x01 \x01(\tH\x00\x88\x01\x01\x42\x08\n\x06_email\"\x9b\t\n\x0eTransferConfig\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x08\x12 \n\x16\x64\x65stination_dataset_id\x18\x02 \x01(\tH\x00\x12\x14\n\x0c\x64isplay_name\x18\x03 \x01(\t\x12\x16\n\x0e\x64\x61ta_source_id\x18\x05 \x01(\t\x12\'\n\x06params\x18\t \x01(\x0b\x32\x17.google.protobuf.Struct\x12\x10\n\x08schedule\x18\x07 \x01(\t\x12P\n\x10schedule_options\x18\x18 \x01(\x0b\x32\x36.google.cloud.bigquery.datatransfer.v1.ScheduleOptions\x12U\n\x13schedule_options_v2\x18\x1f \x01(\x0b\x32\x38.google.cloud.bigquery.datatransfer.v1.ScheduleOptionsV2\x12 \n\x18\x64\x61ta_refresh_window_days\x18\x0c \x01(\x05\x12\x10\n\x08\x64isabled\x18\r \x01(\x08\x12\x34\n\x0bupdate_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12\x36\n\rnext_run_time\x18\x08 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12H\n\x05state\x18\n \x01(\x0e\x32\x34.google.cloud.bigquery.datatransfer.v1.TransferStateB\x03\xe0\x41\x03\x12\x0f\n\x07user_id\x18\x0b \x01(\x03\x12\x1b\n\x0e\x64\x61taset_region\x18\x0e \x01(\tB\x03\xe0\x41\x03\x12!\n\x19notification_pubsub_topic\x18\x0f \x01(\t\x12R\n\x11\x65mail_preferences\x18\x12 \x01(\x0b\x32\x37.google.cloud.bigquery.datatransfer.v1.EmailPreferences\x12M\n\nowner_info\x18\x1b \x01(\x0b\x32/.google.cloud.bigquery.datatransfer.v1.UserInfoB\x03\xe0\x41\x03H\x01\x88\x01\x01\x12`\n\x18\x65ncryption_configuration\x18\x1c \x01(\x0b\x32>.google.cloud.bigquery.datatransfer.v1.EncryptionConfiguration\x12&\n\x05\x65rror\x18 \x01(\x0b\x32\x12.google.rpc.StatusB\x03\xe0\x41\x03:\xb9\x01\xea\x41\xb5\x01\n2bigquerydatatransfer.googleapis.com/TransferConfig\x12\x34projects/{project}/transferConfigs/{transfer_config}\x12Iprojects/{project}/locations/{location}/transferConfigs/{transfer_config}B\r\n\x0b\x64\x65stinationB\r\n\x0b_owner_info\"M\n\x17\x45ncryptionConfiguration\x12\x32\n\x0ckms_key_name\x18\x01 \x01(\x0b\x32\x1c.google.protobuf.StringValue\"\xff\x06\n\x0bTransferRun\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x08\x12\x31\n\rschedule_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08run_time\x18\n \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12(\n\x0c\x65rror_status\x18\x15 \x01(\x0b\x32\x12.google.rpc.Status\x12\x33\n\nstart_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12\x31\n\x08\x65nd_time\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12\x34\n\x0bupdate_time\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12,\n\x06params\x18\t 
\x01(\x0b\x32\x17.google.protobuf.StructB\x03\xe0\x41\x03\x12%\n\x16\x64\x65stination_dataset_id\x18\x02 \x01(\tB\x03\xe0\x41\x03H\x00\x12\x1b\n\x0e\x64\x61ta_source_id\x18\x07 \x01(\tB\x03\xe0\x41\x03\x12\x43\n\x05state\x18\x08 \x01(\x0e\x32\x34.google.cloud.bigquery.datatransfer.v1.TransferState\x12\x0f\n\x07user_id\x18\x0b \x01(\x03\x12\x15\n\x08schedule\x18\x0c \x01(\tB\x03\xe0\x41\x03\x12&\n\x19notification_pubsub_topic\x18\x17 \x01(\tB\x03\xe0\x41\x03\x12W\n\x11\x65mail_preferences\x18\x19 \x01(\x0b\x32\x37.google.cloud.bigquery.datatransfer.v1.EmailPreferencesB\x03\xe0\x41\x03:\xc4\x01\xea\x41\xc0\x01\n\'bigquerydatatransfer.googleapis.com/Run\x12?projects/{project}/transferConfigs/{transfer_config}/runs/{run}\x12Tprojects/{project}/locations/{location}/transferConfigs/{transfer_config}/runs/{run}B\r\n\x0b\x64\x65stination\"\x8a\x02\n\x0fTransferMessage\x12\x30\n\x0cmessage_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12X\n\x08severity\x18\x02 \x01(\x0e\x32\x46.google.cloud.bigquery.datatransfer.v1.TransferMessage.MessageSeverity\x12\x14\n\x0cmessage_text\x18\x03 \x01(\t\"U\n\x0fMessageSeverity\x12 \n\x1cMESSAGE_SEVERITY_UNSPECIFIED\x10\x00\x12\x08\n\x04INFO\x10\x01\x12\x0b\n\x07WARNING\x10\x02\x12\t\n\x05\x45RROR\x10\x03*K\n\x0cTransferType\x12\x1d\n\x19TRANSFER_TYPE_UNSPECIFIED\x10\x00\x12\t\n\x05\x42\x41TCH\x10\x01\x12\r\n\tSTREAMING\x10\x02\x1a\x02\x18\x01*s\n\rTransferState\x12\x1e\n\x1aTRANSFER_STATE_UNSPECIFIED\x10\x00\x12\x0b\n\x07PENDING\x10\x02\x12\x0b\n\x07RUNNING\x10\x03\x12\r\n\tSUCCEEDED\x10\x04\x12\n\n\x06\x46\x41ILED\x10\x05\x12\r\n\tCANCELLED\x10\x06\x42\x8f\x02\n)com.google.cloud.bigquery.datatransfer.v1B\rTransferProtoP\x01ZMcloud.google.com/go/bigquery/datatransfer/apiv1/datatransferpb;datatransferpb\xa2\x02\x05GCBDT\xaa\x02%Google.Cloud.BigQuery.DataTransfer.V1\xca\x02%Google\\Cloud\\BigQuery\\DataTransfer\\V1\xea\x02)Google::Cloud::Bigquery::DataTransfer::V1b\x06proto3" pool = Google::Protobuf::DescriptorPool.generated_pool @@ -29,8 +29,8 @@ imports = [ ["google.protobuf.Timestamp", "google/protobuf/timestamp.proto"], ["google.protobuf.Struct", "google/protobuf/struct.proto"], - ["google.protobuf.StringValue", "google/protobuf/wrappers.proto"], ["google.rpc.Status", "google/rpc/status.proto"], + ["google.protobuf.StringValue", "google/protobuf/wrappers.proto"], ] imports.each do |type_name, expected_filename| import_file = pool.lookup(type_name).file_descriptor @@ -49,6 +49,10 @@ module DataTransfer module V1 EmailPreferences = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.EmailPreferences").msgclass ScheduleOptions = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.ScheduleOptions").msgclass + ScheduleOptionsV2 = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.ScheduleOptionsV2").msgclass + TimeBasedSchedule = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.TimeBasedSchedule").msgclass + ManualSchedule = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.ManualSchedule").msgclass + EventDrivenSchedule = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.EventDrivenSchedule").msgclass UserInfo = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.UserInfo").msgclass TransferConfig = 
::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.TransferConfig").msgclass EncryptionConfiguration = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.EncryptionConfiguration").msgclass diff --git a/google-cloud-bigquery-data_transfer-v1/proto_docs/google/cloud/bigquery/datatransfer/v1/transfer.rb b/google-cloud-bigquery-data_transfer-v1/proto_docs/google/cloud/bigquery/datatransfer/v1/transfer.rb index c8288d7aea46..3b35a0ef1f6b 100644 --- a/google-cloud-bigquery-data_transfer-v1/proto_docs/google/cloud/bigquery/datatransfer/v1/transfer.rb +++ b/google-cloud-bigquery-data_transfer-v1/proto_docs/google/cloud/bigquery/datatransfer/v1/transfer.rb @@ -57,6 +57,79 @@ class ScheduleOptions extend ::Google::Protobuf::MessageExts::ClassMethods end + # V2 options customizing different types of data transfer schedule. + # This field supports existing time-based and manual transfer schedule. Also + # supports Event-Driven transfer schedule. ScheduleOptionsV2 cannot be used + # together with ScheduleOptions/Schedule. + # @!attribute [rw] time_based_schedule + # @return [::Google::Cloud::Bigquery::DataTransfer::V1::TimeBasedSchedule] + # Time based transfer schedule options. This is the default schedule + # option. + # @!attribute [rw] manual_schedule + # @return [::Google::Cloud::Bigquery::DataTransfer::V1::ManualSchedule] + # Manual transfer schedule. If set, the transfer run will not be + # auto-scheduled by the system, unless the client invokes + # StartManualTransferRuns. This is equivalent to + # disable_auto_scheduling = true. + # @!attribute [rw] event_driven_schedule + # @return [::Google::Cloud::Bigquery::DataTransfer::V1::EventDrivenSchedule] + # Event driven transfer schedule options. If set, the transfer will be + # scheduled upon events arrial. + class ScheduleOptionsV2 + include ::Google::Protobuf::MessageExts + extend ::Google::Protobuf::MessageExts::ClassMethods + end + + # Options customizing the time based transfer schedule. + # Options are migrated from the original ScheduleOptions message. + # @!attribute [rw] schedule + # @return [::String] + # Data transfer schedule. + # If the data source does not support a custom schedule, this should be + # empty. If it is empty, the default value for the data source will be used. + # The specified times are in UTC. + # Examples of valid format: + # `1st,3rd monday of month 15:30`, + # `every wed,fri of jan,jun 13:15`, and + # `first sunday of quarter 00:00`. + # See more explanation about the format here: + # https://cloud.google.com/appengine/docs/flexible/python/scheduling-jobs-with-cron-yaml#the_schedule_format + # + # NOTE: The minimum interval time between recurring transfers depends on the + # data source; refer to the documentation for your data source. + # @!attribute [rw] start_time + # @return [::Google::Protobuf::Timestamp] + # Specifies time to start scheduling transfer runs. The first run will be + # scheduled at or after the start time according to a recurrence pattern + # defined in the schedule string. The start time can be changed at any + # moment. + # @!attribute [rw] end_time + # @return [::Google::Protobuf::Timestamp] + # Defines time to stop scheduling transfer runs. A transfer run cannot be + # scheduled at or after the end time. The end time can be changed at any + # moment. 
+ class TimeBasedSchedule + include ::Google::Protobuf::MessageExts + extend ::Google::Protobuf::MessageExts::ClassMethods + end + + # Options customizing the manual transfer schedule. + class ManualSchedule + include ::Google::Protobuf::MessageExts + extend ::Google::Protobuf::MessageExts::ClassMethods + end + + # Options customizing the event-driven transfer schedule. + # @!attribute [rw] pubsub_subscription + # @return [::String] + # Pub/Sub subscription name used to receive events. + # Only the Google Cloud Storage data source supports this option. + # Format: projects/\\{project}/subscriptions/\\{subscription} + class EventDrivenSchedule + include ::Google::Protobuf::MessageExts + extend ::Google::Protobuf::MessageExts::ClassMethods + end + # Information about a user. # @!attribute [rw] email # @return [::String] @@ -116,6 +189,11 @@ class UserInfo # @!attribute [rw] schedule_options # @return [::Google::Cloud::Bigquery::DataTransfer::V1::ScheduleOptions] # Options customizing the data transfer schedule. + # @!attribute [rw] schedule_options_v2 + # @return [::Google::Cloud::Bigquery::DataTransfer::V1::ScheduleOptionsV2] + # Options customizing different types of data transfer schedule. + # This field replaces the "schedule" and "schedule_options" fields. + # ScheduleOptionsV2 cannot be used together with ScheduleOptions/Schedule. # @!attribute [rw] data_refresh_window_days # @return [::Integer] # The number of days to look back to automatically refresh the data. @@ -166,6 +244,10 @@ class UserInfo # granted permissions to use the key. Read methods will return the key name # applied in effect. Write methods will apply the key if it is present, or # otherwise try to apply project default keys if it is absent. + # @!attribute [r] error + # @return [::Google::Rpc::Status] + # Output only. Error code with detailed information about the reason for the + # latest config failure. 
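+ # @example A hypothetical sketch (the display name, data source ID, and
+ #   schedule string are placeholder values; only messages defined in this
+ #   patch are used):
+ #   config = ::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig.new(
+ #     display_name: "nightly-load",
+ #     data_source_id: "scheduled_query",
+ #     schedule_options_v2: ::Google::Cloud::Bigquery::DataTransfer::V1::ScheduleOptionsV2.new(
+ #       time_based_schedule: ::Google::Cloud::Bigquery::DataTransfer::V1::TimeBasedSchedule.new(
+ #         schedule: "every 24 hours"
+ #       )
+ #     )
+ #   )
+ #   # The error field is output only: the server populates it with a
+ #   # Google::Rpc::Status describing the latest config failure.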
class TransferConfig include ::Google::Protobuf::MessageExts extend ::Google::Protobuf::MessageExts::ClassMethods diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/.gitignore b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/.gitignore deleted file mode 100644 index 0135b6bc6cfc..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/.gitignore +++ /dev/null @@ -1,22 +0,0 @@ -# Ignore bundler lockfiles -Gemfile.lock -gems.locked - -# Ignore documentation output -doc/* -.yardoc/* - -# Ignore test output -coverage/* - -# Ignore build artifacts -pkg/* - -# Ignore files commonly present in certain dev environments -.vagrant -.DS_STORE -.idea -*.iml - -# Ignore synth output -__pycache__ diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/.repo-metadata.json b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/.repo-metadata.json deleted file mode 100644 index 682905e95539..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/.repo-metadata.json +++ /dev/null @@ -1,18 +0,0 @@ -{ - "api_id": "bigquerydatatransfer.googleapis.com", - "api_shortname": "bigquerydatatransfer", - "client_documentation": "https://cloud.google.com/ruby/docs/reference/google-cloud-bigquery-data_transfer-v1/latest", - "distribution_name": "google-cloud-bigquery-data_transfer-v1", - "is_cloud": true, - "language": "ruby", - "name": "bigquerydatatransfer", - "name_pretty": "BigQuery Data Transfer Service V1 API", - "product_documentation": "https://cloud.google.com/bigquery/transfer", - "release_level": "unreleased", - "repo": "googleapis/google-cloud-ruby", - "requires_billing": true, - "ruby-cloud-description": "Schedules queries and transfers external data from SaaS applications to Google BigQuery on a regular basis. Note that google-cloud-bigquery-data_transfer-v1 is a version-specific client library. For most uses, we recommend installing the main client library google-cloud-bigquery-data_transfer instead. 
See the readme for more details.", - "ruby-cloud-env-prefix": "DATA_TRANSFER", - "ruby-cloud-product-url": "https://cloud.google.com/bigquery/transfer", - "library_type": "GAPIC_AUTO" -} diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/.rubocop.yml b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/.rubocop.yml deleted file mode 100644 index 6f5635fddf0e..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/.rubocop.yml +++ /dev/null @@ -1,33 +0,0 @@ -inherit_gem: - google-style: google-style.yml - -AllCops: - Exclude: - - "google-cloud-bigquery-data_transfer-v1.gemspec" - - "lib/**/*_pb.rb" - - "proto_docs/**/*" - - "test/**/*" - - "acceptance/**/*" - - "samples/acceptance/**/*" - - "Rakefile" - -Layout/LineLength: - Enabled: false -Metrics/AbcSize: - Enabled: false -Metrics/ClassLength: - Enabled: false -Metrics/CyclomaticComplexity: - Enabled: false -Metrics/MethodLength: - Enabled: false -Metrics/ModuleLength: - Enabled: false -Metrics/PerceivedComplexity: - Enabled: false -Naming/AccessorMethodName: - Exclude: - - "snippets/**/*.rb" -Naming/FileName: - Exclude: - - "lib/google-cloud-bigquery-data_transfer-v1.rb" diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/.toys.rb b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/.toys.rb deleted file mode 100644 index 23434bdd5d5b..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/.toys.rb +++ /dev/null @@ -1,28 +0,0 @@ -# frozen_string_literal: true - -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Auto-generated by gapic-generator-ruby. DO NOT EDIT! - -toys_version! ">= 0.15.3" - -if ENV["RUBY_COMMON_TOOLS"] - common_tools_dir = File.expand_path ENV["RUBY_COMMON_TOOLS"] - load File.join(common_tools_dir, "toys", "gapic") -else - load_git remote: "https://github.com/googleapis/ruby-common-tools.git", - path: "toys/gapic", - update: true -end diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/.yardopts b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/.yardopts deleted file mode 100644 index 304c3609fa4b..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/.yardopts +++ /dev/null @@ -1,12 +0,0 @@ ---no-private ---title="BigQuery Data Transfer Service V1 API" ---exclude _pb\.rb$ ---markup markdown ---markup-provider redcarpet - -./lib/**/*.rb -./proto_docs/**/*.rb -- -README.md -LICENSE.md -AUTHENTICATION.md diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/AUTHENTICATION.md b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/AUTHENTICATION.md deleted file mode 100644 index 4740aa501122..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/AUTHENTICATION.md +++ /dev/null @@ -1,122 +0,0 @@ -# Authentication - -The recommended way to authenticate to the google-cloud-bigquery-data_transfer-v1 library is to use -[Application Default Credentials (ADC)](https://cloud.google.com/docs/authentication/application-default-credentials). 
-To review all of your authentication options, see [Credentials lookup](#credential-lookup).
-
-## Quickstart
-
-The following example shows how to set up authentication for a local development
-environment with your user credentials.
-
-**NOTE:** This method is _not_ recommended for running in production. User credentials
-should be used only during development.
-
-1. [Download and install the Google Cloud CLI](https://cloud.google.com/sdk).
-2. Set up a local ADC file with your user credentials:
-
-```sh
-gcloud auth application-default login
-```
-
-3. Write code as if already authenticated.
-
-For more information about setting up authentication for a local development environment, see
-[Set up Application Default Credentials](https://cloud.google.com/docs/authentication/provide-credentials-adc#local-dev).
-
-## Credential Lookup
-
-The google-cloud-bigquery-data_transfer-v1 library provides several mechanisms to configure your system.
-Generally, using Application Default Credentials to facilitate automatic
-credentials discovery is the easiest method. But if you need to explicitly specify
-credentials, there are several methods available to you.
-
-Credentials are accepted in the following ways, in the following order of precedence:
-
-1. Credentials specified in method arguments
-2. Credentials specified in configuration
-3. Credentials pointed to or included in environment variables
-4. Credentials found in local ADC file
-5. Credentials returned by the metadata server for the attached service account (GCP)
-
-### Configuration
-
-You can configure a path to a JSON credentials file, either for an individual client object or
-globally, for all client objects. The JSON file can contain credentials created for
-[workload identity federation](https://cloud.google.com/iam/docs/workload-identity-federation),
-[workforce identity federation](https://cloud.google.com/iam/docs/workforce-identity-federation), or a
-[service account key](https://cloud.google.com/docs/authentication/provide-credentials-adc#local-key).
-
-Note: Service account keys are a security risk if not managed correctly. You should
-[choose a more secure alternative to service account keys](https://cloud.google.com/docs/authentication#auth-decision-tree)
-whenever possible.
-
-To configure a credentials file for an individual client initialization:
-
-```ruby
-require "google/cloud/bigquery/data_transfer/v1"
-
-client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new do |config|
-  config.credentials = "path/to/credentialfile.json"
-end
-```
-
-To configure a credentials file globally for all clients:
-
-```ruby
-require "google/cloud/bigquery/data_transfer/v1"
-
-::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.configure do |config|
-  config.credentials = "path/to/credentialfile.json"
-end
-
-client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new
-```
-
-### Environment Variables
-
-You can also use an environment variable to provide a JSON credentials file.
-The environment variable can contain a path to the credentials file or, for
-environments such as Docker containers where writing files is not encouraged,
-you can include the credentials file itself.
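For the container case mentioned above, here is a minimal sketch of supplying the JSON contents directly, assuming (per the variable list below) that `GOOGLE_CLOUD_CREDENTIALS` accepts raw JSON as well as a path; the file path is a placeholder:

```ruby
require "google/cloud/bigquery/data_transfer/v1"

# Put the credentials JSON itself (not a path) into the variable, e.g. in a
# container where writing a key file to disk is not desired.
ENV["GOOGLE_CLOUD_CREDENTIALS"] = File.read "path/to/credentialfile.json"

client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new
```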
- -The JSON file can contain credentials created for -[workload identity federation](https://cloud.google.com/iam/docs/workload-identity-federation), -[workforce identity federation](https://cloud.google.com/iam/docs/workforce-identity-federation), or a -[service account key](https://cloud.google.com/docs/authentication/provide-credentials-adc#local-key). - -Note: Service account keys are a security risk if not managed correctly. You should -[choose a more secure alternative to service account keys](https://cloud.google.com/docs/authentication#auth-decision-tree) -whenever possible. - -The environment variables that google-cloud-bigquery-data_transfer-v1 -checks for credentials are: - -* `GOOGLE_CLOUD_CREDENTIALS` - Path to JSON file, or JSON contents -* `GOOGLE_APPLICATION_CREDENTIALS` - Path to JSON file - -```ruby -require "google/cloud/bigquery/data_transfer/v1" - -ENV["GOOGLE_APPLICATION_CREDENTIALS"] = "path/to/credentialfile.json" - -client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new -``` - -### Local ADC file - -You can set up a local ADC file with your user credentials for authentication during -development. If credentials are not provided in code or in environment variables, -then the local ADC credentials are discovered. - -Follow the steps in [Quickstart](#quickstart) to set up a local ADC file. - -### Google Cloud Platform environments - -When running on Google Cloud Platform (GCP), including Google Compute Engine -(GCE), Google Kubernetes Engine (GKE), Google App Engine (GAE), Google Cloud -Functions (GCF) and Cloud Run, credentials are retrieved from the attached -service account automatically. Code should be written as if already authenticated. - -For more information, see -[Set up ADC for Google Cloud services](https://cloud.google.com/docs/authentication/provide-credentials-adc#attached-sa). diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/CHANGELOG.md b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/CHANGELOG.md deleted file mode 100644 index f88957a62ba2..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/CHANGELOG.md +++ /dev/null @@ -1,2 +0,0 @@ -# Release History - diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/Gemfile b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/Gemfile deleted file mode 100644 index 95163a6d11f8..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/Gemfile +++ /dev/null @@ -1,11 +0,0 @@ -source "https://rubygems.org" - -gemspec - -gem "google-style", "~> 1.27.1" -gem "minitest", "~> 5.22" -gem "minitest-focus", "~> 1.4" -gem "minitest-rg", "~> 5.3" -gem "rake", ">= 13.0" -gem "redcarpet", "~> 3.6" -gem "yard", "~> 0.9" diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/LICENSE.md b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/LICENSE.md deleted file mode 100644 index c261857ba6ad..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/LICENSE.md +++ /dev/null @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. 
- - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. 
This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. 
diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/README.md b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/README.md deleted file mode 100644 index 737d3269426c..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/README.md +++ /dev/null @@ -1,144 +0,0 @@ -# Ruby Client for the BigQuery Data Transfer Service V1 API - -Schedule queries or transfer external data from SaaS applications to Google BigQuery on a regular basis. - -Schedules queries and transfers external data from SaaS applications to Google BigQuery on a regular basis. - -https://github.com/googleapis/google-cloud-ruby - -This gem is a _versioned_ client. It provides basic client classes for a -specific version of the BigQuery Data Transfer Service V1 API. Most users should consider using -the main client gem, -[google-cloud-bigquery-data_transfer](https://rubygems.org/gems/google-cloud-bigquery-data_transfer). -See the section below titled *Which client should I use?* for more information. - -## Installation - -``` -$ gem install google-cloud-bigquery-data_transfer-v1 -``` - -## Before You Begin - -In order to use this library, you first need to go through the following steps: - -1. [Select or create a Cloud Platform project.](https://console.cloud.google.com/project) -1. [Enable billing for your project.](https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project) -1. [Enable the API.](https://console.cloud.google.com/apis/library/bigquerydatatransfer.googleapis.com) -1. [Set up authentication.](AUTHENTICATION.md) - -## Quick Start - -```ruby -require "google/cloud/bigquery/data_transfer/v1" - -client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new -request = ::Google::Cloud::Bigquery::DataTransfer::V1::GetDataSourceRequest.new # (request fields as keyword arguments...) -response = client.get_data_source request -``` - -View the [Client Library Documentation](https://cloud.google.com/ruby/docs/reference/google-cloud-bigquery-data_transfer-v1/latest) -for class and method documentation. - -See also the [Product Documentation](https://cloud.google.com/bigquery/transfer) -for general usage information. - -## Enabling Logging - -To enable logging for this library, set the logger for the underlying [gRPC](https://github.com/grpc/grpc/tree/master/src/ruby) library. -The logger that you set may be a Ruby stdlib [`Logger`](https://ruby-doc.org/current/stdlibs/logger/Logger.html) as shown below, -or a [`Google::Cloud::Logging::Logger`](https://cloud.google.com/ruby/docs/reference/google-cloud-logging/latest) -that will write logs to [Cloud Logging](https://cloud.google.com/logging/). See [grpc/logconfig.rb](https://github.com/grpc/grpc/blob/master/src/ruby/lib/grpc/logconfig.rb) -and the gRPC [spec_helper.rb](https://github.com/grpc/grpc/blob/master/src/ruby/spec/spec_helper.rb) for additional information. - -Configuring a Ruby stdlib logger: - -```ruby -require "logger" - -module MyLogger - LOGGER = Logger.new $stderr, level: Logger::WARN - def logger - LOGGER - end -end - -# Define a gRPC module-level logger method before grpc/logconfig.rb loads. -module GRPC - extend MyLogger -end -``` - - -## Google Cloud Samples - -To browse ready to use code samples check [Google Cloud Samples](https://cloud.google.com/docs/samples). - -## Supported Ruby Versions - -This library is supported on Ruby 2.7+. 
-
-Google provides official support for Ruby versions that are actively supported
-by Ruby Core—that is, Ruby versions that are either in normal maintenance or
-in security maintenance, and not end of life. Older versions of Ruby _may_
-still work, but are unsupported and not recommended. See
-https://www.ruby-lang.org/en/downloads/branches/ for details about the Ruby
-support schedule.
-
-## Which client should I use?
-
-Most modern Ruby client libraries for Google APIs come in two flavors: the main
-client library with a name such as `google-cloud-bigquery-data_transfer`,
-and lower-level _versioned_ client libraries with names such as
-`google-cloud-bigquery-data_transfer-v1`.
-_In most cases, you should install the main client._
-
-### What's the difference between the main client and a versioned client?
-
-A _versioned client_ provides a basic set of data types and client classes for
-a _single version_ of a specific service. (That is, for a service with multiple
-versions, there might be a separate versioned client for each service version.)
-Most versioned clients are written and maintained by a code generator.
-
-The _main client_ is designed to provide you with the _recommended_ client
-interfaces for the service. There will be only one main client for any given
-service, even a service with multiple versions. The main client includes
-factory methods for constructing the client objects we recommend for most
-users. In some cases, those will be classes provided by an underlying versioned
-client; in other cases, they will be handwritten higher-level client objects
-with additional capabilities, convenience methods, or best practices built in.
-Generally, the main client will default to a recommended service version,
-although in some cases you can override this if you need to talk to a specific
-service version.
-
-### Why would I want to use the main client?
-
-We recommend that most users install the main client gem for a service. You can
-identify this gem as the one _without_ a version in its name, e.g.
-`google-cloud-bigquery-data_transfer`.
-The main client is recommended because it will embody the best practices for
-accessing the service, and may also provide more convenient interfaces or
-tighter integration into frameworks and third-party libraries. In addition, the
-documentation and samples published by Google will generally demonstrate use of
-the main client.
-
-### Why would I want to use a versioned client?
-
-You can use a versioned client if you are content with a possibly lower-level
-class interface, you explicitly want to avoid features provided by the main
-client, or you want to access a specific service version not covered by the
-main client. You can identify versioned client gems because the service version
-is part of the name, e.g. `google-cloud-bigquery-data_transfer-v1`.
-
-### What about the google-apis- clients?
-
-Client library gems with names that begin with `google-apis-` are based on an
-older code generation technology. They talk to a REST/JSON backend (whereas
-most modern clients talk to a [gRPC](https://grpc.io/) backend) and they may
-not offer the same performance, features, and ease of use provided by more
-modern clients.
-
-The `google-apis-` clients have wide coverage across Google services, so you
-might need to use one if there is no modern client available for the service.
-However, if a modern client is available, we generally recommend it over the
-older `google-apis-` clients.
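The README's Quick Start leaves the request fields as a placeholder comment. A filled-in variant, as a hedged sketch: the `name` follows the `projects/{project_id}/dataSources/{data_source_id}` format documented on `get_data_source` later in this patch, and the project and data source ids are placeholder assumptions.

```ruby
require "google/cloud/bigquery/data_transfer/v1"

client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new

# "my-project" and "scheduled_query" are placeholder ids, not values taken
# from this patch.
request = ::Google::Cloud::Bigquery::DataTransfer::V1::GetDataSourceRequest.new(
  name: "projects/my-project/dataSources/scheduled_query"
)
response = client.get_data_source request
puts response.data_source_id
```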
diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/Rakefile b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/Rakefile deleted file mode 100644 index 92a8296dfacc..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/Rakefile +++ /dev/null @@ -1,168 +0,0 @@ -# frozen_string_literal: true - -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Auto-generated by gapic-generator-ruby. DO NOT EDIT! - -require "bundler/setup" -require "bundler/gem_tasks" - -require "rubocop/rake_task" -RuboCop::RakeTask.new - -require "rake/testtask" -desc "Run tests." -Rake::TestTask.new do |t| - t.libs << "test" - t.test_files = FileList["test/**/*_test.rb"] - t.warning = false -end - -desc "Runs the smoke tests." -Rake::TestTask.new :smoke_test do |t| - t.test_files = FileList["acceptance/**/*smoke_test.rb"] - t.warning = false -end - -# Acceptance tests -desc "Run the google-cloud-bigquery-data_transfer-v1 acceptance tests." -task :acceptance, :project, :keyfile do |t, args| - project = args[:project] - project ||= - ENV["DATA_TRANSFER_TEST_PROJECT"] || - ENV["GCLOUD_TEST_PROJECT"] - keyfile = args[:keyfile] - keyfile ||= - ENV["DATA_TRANSFER_TEST_KEYFILE"] || - ENV["GCLOUD_TEST_KEYFILE"] - if keyfile - keyfile = File.read keyfile - else - keyfile ||= - ENV["DATA_TRANSFER_TEST_KEYFILE_JSON"] || - ENV["GCLOUD_TEST_KEYFILE_JSON"] - end - if project.nil? || keyfile.nil? - fail "You must provide a project and keyfile. e.g. rake acceptance[test123, /path/to/keyfile.json] or DATA_TRANSFER_TEST_PROJECT=test123 DATA_TRANSFER_TEST_KEYFILE=/path/to/keyfile.json rake acceptance" - end - require "google/cloud/bigquery/data_transfer/v1/data_transfer_service/credentials" - ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Credentials.env_vars.each do |path| - ENV[path] = nil - end - ENV["DATA_TRANSFER_PROJECT"] = project - ENV["DATA_TRANSFER_TEST_PROJECT"] = project - ENV["DATA_TRANSFER_KEYFILE_JSON"] = keyfile - - Rake::Task["acceptance:run"].invoke -end - -namespace :acceptance do - task :run do - if File.directory? "acceptance" - Rake::Task[:smoke_test].invoke - else - puts "The google-cloud-bigquery-data_transfer-v1 gem has no acceptance tests." - end - end - - desc "Run acceptance cleanup." - task :cleanup do - end -end - -task :samples do - Rake::Task["samples:latest"].invoke -end - -namespace :samples do - task :latest do - if File.directory? "samples" - Dir.chdir "samples" do - Bundler.with_clean_env do - ENV["GOOGLE_CLOUD_SAMPLES_TEST"] = "not_master" - sh "bundle update" - sh "bundle exec rake test" - end - end - else - puts "The google-cloud-bigquery-data_transfer-v1 gem has no samples to test." - end - end - - task :master do - if File.directory? "samples" - Dir.chdir "samples" do - Bundler.with_clean_env do - ENV["GOOGLE_CLOUD_SAMPLES_TEST"] = "master" - sh "bundle update" - sh "bundle exec rake test" - end - end - else - puts "The google-cloud-bigquery-data_transfer-v1 gem has no samples to test." 
- end - end -end - -require "yard" -require "yard/rake/yardoc_task" -YARD::Rake::YardocTask.new do |y| -end - -desc "Run yard-doctest example tests." -task :doctest do - puts "The google-cloud-bigquery-data_transfer-v1 gem does not have doctest tests." -end - -desc "Run the CI build" -task :ci do - header "BUILDING google-cloud-bigquery-data_transfer-v1" - header "google-cloud-bigquery-data_transfer-v1 rubocop", "*" - Rake::Task[:rubocop].invoke - header "google-cloud-bigquery-data_transfer-v1 yard", "*" - Rake::Task[:yard].invoke - header "google-cloud-bigquery-data_transfer-v1 test", "*" - Rake::Task[:test].invoke -end - -namespace :ci do - desc "Run the CI build, with smoke tests." - task :smoke_test do - Rake::Task[:ci].invoke - header "google-cloud-bigquery-data_transfer-v1 smoke_test", "*" - Rake::Task[:smoke_test].invoke - end - desc "Run the CI build, with acceptance tests." - task :acceptance do - Rake::Task[:ci].invoke - header "google-cloud-bigquery-data_transfer-v1 acceptance", "*" - Rake::Task[:acceptance].invoke - end - task :a do - # This is a handy shortcut to save typing - Rake::Task["ci:acceptance"].invoke - end -end - -task default: :test - -def header str, token = "#" - line_length = str.length + 8 - puts "" - puts token * line_length - puts "#{token * 3} #{str} #{token * 3}" - puts token * line_length - puts "" -end diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/gapic_metadata.json b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/gapic_metadata.json deleted file mode 100644 index 7ae4ebcd10a5..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/gapic_metadata.json +++ /dev/null @@ -1,98 +0,0 @@ -{ - "schema": "1.0", - "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", - "language": "ruby", - "protoPackage": "google.cloud.bigquery.datatransfer.v1", - "libraryPackage": "::Google::Cloud::Bigquery::DataTransfer::V1", - "services": { - "DataTransferService": { - "clients": { - "grpc": { - "libraryClient": "::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client", - "rpcs": { - "GetDataSource": { - "methods": [ - "get_data_source" - ] - }, - "ListDataSources": { - "methods": [ - "list_data_sources" - ] - }, - "CreateTransferConfig": { - "methods": [ - "create_transfer_config" - ] - }, - "UpdateTransferConfig": { - "methods": [ - "update_transfer_config" - ] - }, - "DeleteTransferConfig": { - "methods": [ - "delete_transfer_config" - ] - }, - "GetTransferConfig": { - "methods": [ - "get_transfer_config" - ] - }, - "ListTransferConfigs": { - "methods": [ - "list_transfer_configs" - ] - }, - "ScheduleTransferRuns": { - "methods": [ - "schedule_transfer_runs" - ] - }, - "StartManualTransferRuns": { - "methods": [ - "start_manual_transfer_runs" - ] - }, - "GetTransferRun": { - "methods": [ - "get_transfer_run" - ] - }, - "DeleteTransferRun": { - "methods": [ - "delete_transfer_run" - ] - }, - "ListTransferRuns": { - "methods": [ - "list_transfer_runs" - ] - }, - "ListTransferLogs": { - "methods": [ - "list_transfer_logs" - ] - }, - "CheckValidCreds": { - "methods": [ - "check_valid_creds" - ] - }, - "EnrollDataSources": { - "methods": [ - "enroll_data_sources" - ] - }, - "UnenrollDataSources": { - "methods": [ - "unenroll_data_sources" - ] - } - } - } - } - } - } -} diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/google-cloud-bigquery-data_transfer-v1.gemspec 
b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/google-cloud-bigquery-data_transfer-v1.gemspec deleted file mode 100644 index 8d124f6d7e84..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/google-cloud-bigquery-data_transfer-v1.gemspec +++ /dev/null @@ -1,29 +0,0 @@ -# -*- ruby -*- -# encoding: utf-8 - -require File.expand_path("lib/google/cloud/bigquery/data_transfer/v1/version", __dir__) - -Gem::Specification.new do |gem| - gem.name = "google-cloud-bigquery-data_transfer-v1" - gem.version = Google::Cloud::Bigquery::DataTransfer::V1::VERSION - - gem.authors = ["Google LLC"] - gem.email = "googleapis-packages@google.com" - gem.description = "Schedules queries and transfers external data from SaaS applications to Google BigQuery on a regular basis. Note that google-cloud-bigquery-data_transfer-v1 is a version-specific client library. For most uses, we recommend installing the main client library google-cloud-bigquery-data_transfer instead. See the readme for more details." - gem.summary = "Schedule queries or transfer external data from SaaS applications to Google BigQuery on a regular basis." - gem.homepage = "https://github.com/googleapis/google-cloud-ruby" - gem.license = "Apache-2.0" - - gem.platform = Gem::Platform::RUBY - - gem.files = `git ls-files -- lib/*`.split("\n") + - `git ls-files -- proto_docs/*`.split("\n") + - ["README.md", "LICENSE.md", "AUTHENTICATION.md", ".yardopts"] - gem.require_paths = ["lib"] - - gem.required_ruby_version = ">= 2.7" - - gem.add_dependency "gapic-common", ">= 0.21.1", "< 2.a" - gem.add_dependency "google-cloud-errors", "~> 1.0" - gem.add_dependency "google-cloud-location", ">= 0.7", "< 2.a" -end diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google-cloud-bigquery-data_transfer-v1.rb b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google-cloud-bigquery-data_transfer-v1.rb deleted file mode 100644 index af6e59922b17..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google-cloud-bigquery-data_transfer-v1.rb +++ /dev/null @@ -1,21 +0,0 @@ -# frozen_string_literal: true - -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Auto-generated by gapic-generator-ruby. DO NOT EDIT! - -# This gem does not autoload during Bundler.require. 
To load this gem, -# issue explicit require statements for the packages desired, e.g.: -# require "google/cloud/bigquery/data_transfer/v1" diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/data_transfer/v1.rb b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/data_transfer/v1.rb deleted file mode 100644 index c5f4d18ce0af..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/data_transfer/v1.rb +++ /dev/null @@ -1,47 +0,0 @@ -# frozen_string_literal: true - -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Auto-generated by gapic-generator-ruby. DO NOT EDIT! - -require "google/cloud/bigquery/data_transfer/v1/data_transfer_service" -require "google/cloud/bigquery/data_transfer/v1/version" - -module Google - module Cloud - module Bigquery - module DataTransfer - ## - # API client module. - # - # @example Load this package, including all its services, and instantiate a gRPC client - # - # require "google/cloud/bigquery/data_transfer/v1" - # client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new - # - # @example Load this package, including all its services, and instantiate a REST client - # - # require "google/cloud/bigquery/data_transfer/v1" - # client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::Client.new - # - module V1 - end - end - end - end -end - -helper_path = ::File.join __dir__, "v1", "_helpers.rb" -require "google/cloud/bigquery/data_transfer/v1/_helpers" if ::File.file? helper_path diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/data_transfer/v1/bindings_override.rb b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/data_transfer/v1/bindings_override.rb deleted file mode 100644 index 56c4bf1ffbfb..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/data_transfer/v1/bindings_override.rb +++ /dev/null @@ -1,104 +0,0 @@ -# frozen_string_literal: true - -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Auto-generated by gapic-generator-ruby. DO NOT EDIT! 
- -require "gapic/config" - -module Google - module Cloud - module Bigquery - module DataTransfer - ## - # @example Loading just the REST part of this package, including all its services, and instantiating a REST client - # - # require "google/cloud/bigquery/data_transfer/v1/rest" - # client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::Client.new - # - module V1 - ## - # @private - # Initialize the mixin bindings configuration - # - def self.configure - @configure ||= begin - namespace = ["Google", "Cloud", "Bigquery", "DataTransfer"] - parent_config = while namespace.any? - parent_name = namespace.join "::" - parent_const = const_get parent_name - break parent_const.configure if parent_const.respond_to? :configure - namespace.pop - end - - default_config = Configuration.new parent_config - default_config.bindings_override["google.cloud.location.Locations.GetLocation"] = [ - Gapic::Rest::GrpcTranscoder::HttpBinding.create_with_validation( - uri_method: :get, - uri_template: "/v1/{name}", - matches: [ - ["name", %r{^projects/[^/]+/locations/[^/]+/?$}, false] - ], - body: nil - ) - ] - default_config.bindings_override["google.cloud.location.Locations.ListLocations"] = [ - - Gapic::Rest::GrpcTranscoder::HttpBinding.create_with_validation( - uri_method: :get, - uri_template: "/v1/{name}/locations", - matches: [ - ["name", %r{^projects/[^/]+/?$}, false] - ], - body: nil - ) - ] - default_config - end - yield @configure if block_given? - @configure - end - - ## - # @private - # Configuration class for the google.cloud.bigquery.datatransfer.v1 package. - # - # This class contains common configuration for all services - # of the google.cloud.bigquery.datatransfer.v1 package. - # - # This configuration is for internal use of the client library classes, - # and it is not intended that the end-users will read or change it. - # - class Configuration - extend ::Gapic::Config - - # @private - # Overrides for http bindings for the RPC of the mixins for this package. - # Services in this package should use these when creating clients for the mixin services. - # @return [::Hash{::Symbol=>::Array<::Gapic::Rest::GrpcTranscoder::HttpBinding>}] - config_attr :bindings_override, {}, ::Hash, nil - - # @private - def initialize parent_config = nil - @parent_config = parent_config unless parent_config.nil? - - yield self if block_given? - end - end - end - end - end - end -end diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/data_transfer/v1/data_transfer_service.rb b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/data_transfer/v1/data_transfer_service.rb deleted file mode 100644 index 0174fd3ee9b3..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/data_transfer/v1/data_transfer_service.rb +++ /dev/null @@ -1,57 +0,0 @@ -# frozen_string_literal: true - -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -# Auto-generated by gapic-generator-ruby. DO NOT EDIT! - -require "gapic/common" -require "gapic/config" -require "gapic/config/method" - -require "google/cloud/bigquery/data_transfer/v1/version" - -require "google/cloud/bigquery/data_transfer/v1/data_transfer_service/credentials" -require "google/cloud/bigquery/data_transfer/v1/data_transfer_service/paths" -require "google/cloud/bigquery/data_transfer/v1/data_transfer_service/client" -require "google/cloud/bigquery/data_transfer/v1/data_transfer_service/rest" - -module Google - module Cloud - module Bigquery - module DataTransfer - module V1 - ## - # This API allows users to manage their data transfers into BigQuery. - # - # @example Load this service and instantiate a gRPC client - # - # require "google/cloud/bigquery/data_transfer/v1/data_transfer_service" - # client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new - # - # @example Load this service and instantiate a REST client - # - # require "google/cloud/bigquery/data_transfer/v1/data_transfer_service/rest" - # client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::Client.new - # - module DataTransferService - end - end - end - end - end -end - -helper_path = ::File.join __dir__, "data_transfer_service", "helpers.rb" -require "google/cloud/bigquery/data_transfer/v1/data_transfer_service/helpers" if ::File.file? helper_path diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/data_transfer/v1/data_transfer_service/client.rb b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/data_transfer/v1/data_transfer_service/client.rb deleted file mode 100644 index b0c00b138ba2..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/data_transfer/v1/data_transfer_service/client.rb +++ /dev/null @@ -1,2127 +0,0 @@ -# frozen_string_literal: true - -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Auto-generated by gapic-generator-ruby. DO NOT EDIT! - -require "google/cloud/errors" -require "google/cloud/bigquery/datatransfer/v1/datatransfer_pb" -require "google/cloud/location" - -module Google - module Cloud - module Bigquery - module DataTransfer - module V1 - module DataTransferService - ## - # Client for the DataTransferService service. - # - # This API allows users to manage their data transfers into BigQuery. - # - class Client - # @private - API_VERSION = "" - - # @private - DEFAULT_ENDPOINT_TEMPLATE = "bigquerydatatransfer.$UNIVERSE_DOMAIN$" - - include Paths - - # @private - attr_reader :data_transfer_service_stub - - ## - # Configure the DataTransferService Client class. - # - # See {::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client::Configuration} - # for a description of the configuration fields. 
- # - # @example - # - # # Modify the configuration for all DataTransferService clients - # ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.configure do |config| - # config.timeout = 10.0 - # end - # - # @yield [config] Configure the Client client. - # @yieldparam config [Client::Configuration] - # - # @return [Client::Configuration] - # - def self.configure - @configure ||= begin - namespace = ["Google", "Cloud", "Bigquery", "DataTransfer", "V1"] - parent_config = while namespace.any? - parent_name = namespace.join "::" - parent_const = const_get parent_name - break parent_const.configure if parent_const.respond_to? :configure - namespace.pop - end - default_config = Client::Configuration.new parent_config - - default_config.rpcs.get_data_source.timeout = 20.0 - default_config.rpcs.get_data_source.retry_policy = { - initial_delay: 0.1, max_delay: 60.0, multiplier: 1.3, retry_codes: [14, 4] - } - - default_config.rpcs.list_data_sources.timeout = 20.0 - default_config.rpcs.list_data_sources.retry_policy = { - initial_delay: 0.1, max_delay: 60.0, multiplier: 1.3, retry_codes: [14, 4] - } - - default_config.rpcs.create_transfer_config.timeout = 30.0 - - default_config.rpcs.update_transfer_config.timeout = 30.0 - - default_config.rpcs.delete_transfer_config.timeout = 20.0 - default_config.rpcs.delete_transfer_config.retry_policy = { - initial_delay: 0.1, max_delay: 60.0, multiplier: 1.3, retry_codes: [14, 4] - } - - default_config.rpcs.get_transfer_config.timeout = 20.0 - default_config.rpcs.get_transfer_config.retry_policy = { - initial_delay: 0.1, max_delay: 60.0, multiplier: 1.3, retry_codes: [14, 4] - } - - default_config.rpcs.list_transfer_configs.timeout = 20.0 - default_config.rpcs.list_transfer_configs.retry_policy = { - initial_delay: 0.1, max_delay: 60.0, multiplier: 1.3, retry_codes: [14, 4] - } - - default_config.rpcs.schedule_transfer_runs.timeout = 30.0 - - default_config.rpcs.get_transfer_run.timeout = 20.0 - default_config.rpcs.get_transfer_run.retry_policy = { - initial_delay: 0.1, max_delay: 60.0, multiplier: 1.3, retry_codes: [14, 4] - } - - default_config.rpcs.delete_transfer_run.timeout = 20.0 - default_config.rpcs.delete_transfer_run.retry_policy = { - initial_delay: 0.1, max_delay: 60.0, multiplier: 1.3, retry_codes: [14, 4] - } - - default_config.rpcs.list_transfer_runs.timeout = 20.0 - default_config.rpcs.list_transfer_runs.retry_policy = { - initial_delay: 0.1, max_delay: 60.0, multiplier: 1.3, retry_codes: [14, 4] - } - - default_config.rpcs.list_transfer_logs.timeout = 20.0 - default_config.rpcs.list_transfer_logs.retry_policy = { - initial_delay: 0.1, max_delay: 60.0, multiplier: 1.3, retry_codes: [14, 4] - } - - default_config.rpcs.check_valid_creds.timeout = 20.0 - default_config.rpcs.check_valid_creds.retry_policy = { - initial_delay: 0.1, max_delay: 60.0, multiplier: 1.3, retry_codes: [14, 4] - } - - default_config - end - yield @configure if block_given? - @configure - end - - ## - # Configure the DataTransferService Client instance. - # - # The configuration is set to the derived mode, meaning that values can be changed, - # but structural changes (adding new fields, etc.) are not allowed. Structural changes - # should be made on {Client.configure}. - # - # See {::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client::Configuration} - # for a description of the configuration fields. - # - # @yield [config] Configure the Client client. 
- # @yieldparam config [Client::Configuration] - # - # @return [Client::Configuration] - # - def configure - yield @config if block_given? - @config - end - - ## - # The effective universe domain - # - # @return [String] - # - def universe_domain - @data_transfer_service_stub.universe_domain - end - - ## - # Create a new DataTransferService client object. - # - # @example - # - # # Create a client using the default configuration - # client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new - # - # # Create a client using a custom configuration - # client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new do |config| - # config.timeout = 10.0 - # end - # - # @yield [config] Configure the DataTransferService client. - # @yieldparam config [Client::Configuration] - # - def initialize - # These require statements are intentionally placed here to initialize - # the gRPC module only when it's required. - # See https://github.com/googleapis/toolkit/issues/446 - require "gapic/grpc" - require "google/cloud/bigquery/datatransfer/v1/datatransfer_services_pb" - - # Create the configuration object - @config = Configuration.new Client.configure - - # Yield the configuration if needed - yield @config if block_given? - - # Create credentials - credentials = @config.credentials - # Use self-signed JWT if the endpoint is unchanged from default, - # but only if the default endpoint does not have a region prefix. - enable_self_signed_jwt = @config.endpoint.nil? || - (@config.endpoint == Configuration::DEFAULT_ENDPOINT && - !@config.endpoint.split(".").first.include?("-")) - credentials ||= Credentials.default scope: @config.scope, - enable_self_signed_jwt: enable_self_signed_jwt - if credentials.is_a?(::String) || credentials.is_a?(::Hash) - credentials = Credentials.new credentials, scope: @config.scope - end - @quota_project_id = @config.quota_project - @quota_project_id ||= credentials.quota_project_id if credentials.respond_to? :quota_project_id - - @data_transfer_service_stub = ::Gapic::ServiceStub.new( - ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Stub, - credentials: credentials, - endpoint: @config.endpoint, - endpoint_template: DEFAULT_ENDPOINT_TEMPLATE, - universe_domain: @config.universe_domain, - channel_args: @config.channel_args, - interceptors: @config.interceptors, - channel_pool_config: @config.channel_pool - ) - - @location_client = Google::Cloud::Location::Locations::Client.new do |config| - config.credentials = credentials - config.quota_project = @quota_project_id - config.endpoint = @data_transfer_service_stub.endpoint - config.universe_domain = @data_transfer_service_stub.universe_domain - end - end - - ## - # Get the associated client for mix-in of the Locations. - # - # @return [Google::Cloud::Location::Locations::Client] - # - attr_reader :location_client - - # Service calls - - ## - # Retrieves a supported data source and returns its settings. - # - # @overload get_data_source(request, options = nil) - # Pass arguments to `get_data_source` via a request object, either of type - # {::Google::Cloud::Bigquery::DataTransfer::V1::GetDataSourceRequest} or an equivalent Hash. - # - # @param request [::Google::Cloud::Bigquery::DataTransfer::V1::GetDataSourceRequest, ::Hash] - # A request object representing the call parameters. Required. To specify no - # parameters, or to keep all the default parameter values, pass an empty Hash. 
- # @param options [::Gapic::CallOptions, ::Hash] - # Overrides the default settings for this call, e.g, timeout, retries, etc. Optional. - # - # @overload get_data_source(name: nil) - # Pass arguments to `get_data_source` via keyword arguments. Note that at - # least one keyword argument is required. To specify no parameters, or to keep all - # the default parameter values, pass an empty Hash as a request object (see above). - # - # @param name [::String] - # Required. The field will contain name of the resource requested, for - # example: `projects/{project_id}/dataSources/{data_source_id}` or - # `projects/{project_id}/locations/{location_id}/dataSources/{data_source_id}` - # - # @yield [response, operation] Access the result along with the RPC operation - # @yieldparam response [::Google::Cloud::Bigquery::DataTransfer::V1::DataSource] - # @yieldparam operation [::GRPC::ActiveCall::Operation] - # - # @return [::Google::Cloud::Bigquery::DataTransfer::V1::DataSource] - # - # @raise [::Google::Cloud::Error] if the RPC is aborted. - # - # @example Basic example - # require "google/cloud/bigquery/data_transfer/v1" - # - # # Create a client object. The client can be reused for multiple calls. - # client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new - # - # # Create a request. To set request fields, pass in keyword arguments. - # request = Google::Cloud::Bigquery::DataTransfer::V1::GetDataSourceRequest.new - # - # # Call the get_data_source method. - # result = client.get_data_source request - # - # # The returned object is of type Google::Cloud::Bigquery::DataTransfer::V1::DataSource. - # p result - # - def get_data_source request, options = nil - raise ::ArgumentError, "request must be provided" if request.nil? - - request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigquery::DataTransfer::V1::GetDataSourceRequest - - # Converts hash and nil to an options object - options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h - - # Customize the options with defaults - metadata = @config.rpcs.get_data_source.metadata.to_h - - # Set x-goog-api-client, x-goog-user-project and x-goog-api-version headers - metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \ - lib_name: @config.lib_name, lib_version: @config.lib_version, - gapic_version: ::Google::Cloud::Bigquery::DataTransfer::V1::VERSION - metadata[:"x-goog-api-version"] = API_VERSION unless API_VERSION.empty? - metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id - - header_params = {} - if request.name - header_params["name"] = request.name - end - - request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&") - metadata[:"x-goog-request-params"] ||= request_params_header - - options.apply_defaults timeout: @config.rpcs.get_data_source.timeout, - metadata: metadata, - retry_policy: @config.rpcs.get_data_source.retry_policy - - options.apply_defaults timeout: @config.timeout, - metadata: @config.metadata, - retry_policy: @config.retry_policy - - @data_transfer_service_stub.call_rpc :get_data_source, request, options: options do |response, operation| - yield response, operation if block_given? - return response - end - rescue ::GRPC::BadStatus => e - raise ::Google::Cloud::Error.from_error(e) - end - - ## - # Lists supported data sources and returns their settings. 
- # - # @overload list_data_sources(request, options = nil) - # Pass arguments to `list_data_sources` via a request object, either of type - # {::Google::Cloud::Bigquery::DataTransfer::V1::ListDataSourcesRequest} or an equivalent Hash. - # - # @param request [::Google::Cloud::Bigquery::DataTransfer::V1::ListDataSourcesRequest, ::Hash] - # A request object representing the call parameters. Required. To specify no - # parameters, or to keep all the default parameter values, pass an empty Hash. - # @param options [::Gapic::CallOptions, ::Hash] - # Overrides the default settings for this call, e.g, timeout, retries, etc. Optional. - # - # @overload list_data_sources(parent: nil, page_token: nil, page_size: nil) - # Pass arguments to `list_data_sources` via keyword arguments. Note that at - # least one keyword argument is required. To specify no parameters, or to keep all - # the default parameter values, pass an empty Hash as a request object (see above). - # - # @param parent [::String] - # Required. The BigQuery project id for which data sources should be - # returned. Must be in the form: `projects/{project_id}` or - # `projects/{project_id}/locations/{location_id}` - # @param page_token [::String] - # Pagination token, which can be used to request a specific page - # of `ListDataSourcesRequest` list results. For multiple-page - # results, `ListDataSourcesResponse` outputs - # a `next_page` token, which can be used as the - # `page_token` value to request the next page of list results. - # @param page_size [::Integer] - # Page size. The default page size is the maximum value of 1000 results. - # - # @yield [response, operation] Access the result along with the RPC operation - # @yieldparam response [::Gapic::PagedEnumerable<::Google::Cloud::Bigquery::DataTransfer::V1::DataSource>] - # @yieldparam operation [::GRPC::ActiveCall::Operation] - # - # @return [::Gapic::PagedEnumerable<::Google::Cloud::Bigquery::DataTransfer::V1::DataSource>] - # - # @raise [::Google::Cloud::Error] if the RPC is aborted. - # - # @example Basic example - # require "google/cloud/bigquery/data_transfer/v1" - # - # # Create a client object. The client can be reused for multiple calls. - # client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new - # - # # Create a request. To set request fields, pass in keyword arguments. - # request = Google::Cloud::Bigquery::DataTransfer::V1::ListDataSourcesRequest.new - # - # # Call the list_data_sources method. - # result = client.list_data_sources request - # - # # The returned object is of type Gapic::PagedEnumerable. You can iterate - # # over elements, and API calls will be issued to fetch pages as needed. - # result.each do |item| - # # Each element is of type ::Google::Cloud::Bigquery::DataTransfer::V1::DataSource. - # p item - # end - # - def list_data_sources request, options = nil - raise ::ArgumentError, "request must be provided" if request.nil? - - request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigquery::DataTransfer::V1::ListDataSourcesRequest - - # Converts hash and nil to an options object - options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? 
:to_h - - # Customize the options with defaults - metadata = @config.rpcs.list_data_sources.metadata.to_h - - # Set x-goog-api-client, x-goog-user-project and x-goog-api-version headers - metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \ - lib_name: @config.lib_name, lib_version: @config.lib_version, - gapic_version: ::Google::Cloud::Bigquery::DataTransfer::V1::VERSION - metadata[:"x-goog-api-version"] = API_VERSION unless API_VERSION.empty? - metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id - - header_params = {} - if request.parent - header_params["parent"] = request.parent - end - - request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&") - metadata[:"x-goog-request-params"] ||= request_params_header - - options.apply_defaults timeout: @config.rpcs.list_data_sources.timeout, - metadata: metadata, - retry_policy: @config.rpcs.list_data_sources.retry_policy - - options.apply_defaults timeout: @config.timeout, - metadata: @config.metadata, - retry_policy: @config.retry_policy - - @data_transfer_service_stub.call_rpc :list_data_sources, request, options: options do |response, operation| - response = ::Gapic::PagedEnumerable.new @data_transfer_service_stub, :list_data_sources, request, response, operation, options - yield response, operation if block_given? - return response - end - rescue ::GRPC::BadStatus => e - raise ::Google::Cloud::Error.from_error(e) - end - - ## - # Creates a new data transfer configuration. - # - # @overload create_transfer_config(request, options = nil) - # Pass arguments to `create_transfer_config` via a request object, either of type - # {::Google::Cloud::Bigquery::DataTransfer::V1::CreateTransferConfigRequest} or an equivalent Hash. - # - # @param request [::Google::Cloud::Bigquery::DataTransfer::V1::CreateTransferConfigRequest, ::Hash] - # A request object representing the call parameters. Required. To specify no - # parameters, or to keep all the default parameter values, pass an empty Hash. - # @param options [::Gapic::CallOptions, ::Hash] - # Overrides the default settings for this call, e.g, timeout, retries, etc. Optional. - # - # @overload create_transfer_config(parent: nil, transfer_config: nil, authorization_code: nil, version_info: nil, service_account_name: nil) - # Pass arguments to `create_transfer_config` via keyword arguments. Note that at - # least one keyword argument is required. To specify no parameters, or to keep all - # the default parameter values, pass an empty Hash as a request object (see above). - # - # @param parent [::String] - # Required. The BigQuery project id where the transfer configuration should - # be created. Must be in the format - # projects/\\{project_id}/locations/\\{location_id} or projects/\\{project_id}. If - # specified location and location of the destination bigquery dataset do not - # match - the request will fail. - # @param transfer_config [::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig, ::Hash] - # Required. Data transfer configuration to create. - # @param authorization_code [::String] - # Deprecated: Authorization code was required when - # `transferConfig.dataSourceId` is 'youtube_channel' but it is no longer used - # in any data sources. Use `version_info` instead. - # - # Optional OAuth2 authorization code to use with this transfer configuration. - # This is required only if `transferConfig.dataSourceId` is 'youtube_channel' - # and new credentials are needed, as indicated by `CheckValidCreds`. 
In order - # to obtain authorization_code, make a request to the following URL: - #
- #     https://bigquery.cloud.google.com/datatransfer/oauthz/auth?redirect_uri=urn:ietf:wg:oauth:2.0:oob&response_type=authorization_code&client_id=client_id&scope=data_source_scopes
- #
- # * The client_id is the OAuth client_id of the data source as - # returned by ListDataSources method. - # * data_source_scopes are the scopes returned by ListDataSources - # method. - # - # Note that this should not be set when `service_account_name` is used to - # create the transfer config. - # @param version_info [::String] - # Optional version info. This parameter replaces `authorization_code` which - # is no longer used in any data sources. This is required only if - # `transferConfig.dataSourceId` is 'youtube_channel' *or* new credentials - # are needed, as indicated by `CheckValidCreds`. In order to obtain version - # info, make a request to the following URL: - #
- #     https://bigquery.cloud.google.com/datatransfer/oauthz/auth?redirect_uri=urn:ietf:wg:oauth:2.0:oob&response_type=version_info&client_id=client_id&scope=data_source_scopes
- #
- # * The client_id is the OAuth client_id of the data source as - # returned by ListDataSources method. - # * data_source_scopes are the scopes returned by ListDataSources - # method. - # - # Note that this should not be set when `service_account_name` is used to - # create the transfer config. - # @param service_account_name [::String] - # Optional service account email. If this field is set, the transfer config - # will be created with this service account's credentials. It requires that - # the requesting user calling this API has permissions to act as this service - # account. - # - # Note that not all data sources support service account credentials when - # creating a transfer config. For the latest list of data sources, read about - # [using service - # accounts](https://cloud.google.com/bigquery-transfer/docs/use-service-accounts). - # - # @yield [response, operation] Access the result along with the RPC operation - # @yieldparam response [::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig] - # @yieldparam operation [::GRPC::ActiveCall::Operation] - # - # @return [::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig] - # - # @raise [::Google::Cloud::Error] if the RPC is aborted. - # - # @example Basic example - # require "google/cloud/bigquery/data_transfer/v1" - # - # # Create a client object. The client can be reused for multiple calls. - # client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new - # - # # Create a request. To set request fields, pass in keyword arguments. - # request = Google::Cloud::Bigquery::DataTransfer::V1::CreateTransferConfigRequest.new - # - # # Call the create_transfer_config method. - # result = client.create_transfer_config request - # - # # The returned object is of type Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig. - # p result - # - def create_transfer_config request, options = nil - raise ::ArgumentError, "request must be provided" if request.nil? - - request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigquery::DataTransfer::V1::CreateTransferConfigRequest - - # Converts hash and nil to an options object - options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h - - # Customize the options with defaults - metadata = @config.rpcs.create_transfer_config.metadata.to_h - - # Set x-goog-api-client, x-goog-user-project and x-goog-api-version headers - metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \ - lib_name: @config.lib_name, lib_version: @config.lib_version, - gapic_version: ::Google::Cloud::Bigquery::DataTransfer::V1::VERSION - metadata[:"x-goog-api-version"] = API_VERSION unless API_VERSION.empty? - metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id - - header_params = {} - if request.parent - header_params["parent"] = request.parent - end - - request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&") - metadata[:"x-goog-request-params"] ||= request_params_header - - options.apply_defaults timeout: @config.rpcs.create_transfer_config.timeout, - metadata: metadata, - retry_policy: @config.rpcs.create_transfer_config.retry_policy - - options.apply_defaults timeout: @config.timeout, - metadata: @config.metadata, - retry_policy: @config.retry_policy - - @data_transfer_service_stub.call_rpc :create_transfer_config, request, options: options do |response, operation| - yield response, operation if block_given? 
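- # Note: `return` inside this block exits create_transfer_config itself,
- # handing the bare response back to callers that did not pass a block.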
- return response - end - rescue ::GRPC::BadStatus => e - raise ::Google::Cloud::Error.from_error(e) - end - - ## - # Updates a data transfer configuration. - # All fields must be set, even if they are not updated. - # - # @overload update_transfer_config(request, options = nil) - # Pass arguments to `update_transfer_config` via a request object, either of type - # {::Google::Cloud::Bigquery::DataTransfer::V1::UpdateTransferConfigRequest} or an equivalent Hash. - # - # @param request [::Google::Cloud::Bigquery::DataTransfer::V1::UpdateTransferConfigRequest, ::Hash] - # A request object representing the call parameters. Required. To specify no - # parameters, or to keep all the default parameter values, pass an empty Hash. - # @param options [::Gapic::CallOptions, ::Hash] - # Overrides the default settings for this call, e.g, timeout, retries, etc. Optional. - # - # @overload update_transfer_config(transfer_config: nil, authorization_code: nil, update_mask: nil, version_info: nil, service_account_name: nil) - # Pass arguments to `update_transfer_config` via keyword arguments. Note that at - # least one keyword argument is required. To specify no parameters, or to keep all - # the default parameter values, pass an empty Hash as a request object (see above). - # - # @param transfer_config [::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig, ::Hash] - # Required. Data transfer configuration to create. - # @param authorization_code [::String] - # Deprecated: Authorization code was required when - # `transferConfig.dataSourceId` is 'youtube_channel' but it is no longer used - # in any data sources. Use `version_info` instead. - # - # Optional OAuth2 authorization code to use with this transfer configuration. - # This is required only if `transferConfig.dataSourceId` is 'youtube_channel' - # and new credentials are needed, as indicated by `CheckValidCreds`. In order - # to obtain authorization_code, make a request to the following URL: - #
- #     https://bigquery.cloud.google.com/datatransfer/oauthz/auth?redirect_uri=urn:ietf:wg:oauth:2.0:oob&response_type=authorization_code&client_id=client_id&scope=data_source_scopes
- #
- # * The client_id is the OAuth client_id of the data source as - # returned by ListDataSources method. - # * data_source_scopes are the scopes returned by ListDataSources - # method. - # - # Note that this should not be set when `service_account_name` is used to - # update the transfer config. - # @param update_mask [::Google::Protobuf::FieldMask, ::Hash] - # Required. Required list of fields to be updated in this request. - # @param version_info [::String] - # Optional version info. This parameter replaces `authorization_code` which - # is no longer used in any data sources. This is required only if - # `transferConfig.dataSourceId` is 'youtube_channel' *or* new credentials - # are needed, as indicated by `CheckValidCreds`. In order to obtain version - # info, make a request to the following URL: - #
- #     https://bigquery.cloud.google.com/datatransfer/oauthz/auth?redirect_uri=urn:ietf:wg:oauth:2.0:oob&response_type=version_info&client_id=client_id&scope=data_source_scopes
- #
- # * The client_id is the OAuth client_id of the data source as - # returned by ListDataSources method. - # * data_source_scopes are the scopes returned by ListDataSources - # method. - # - # Note that this should not be set when `service_account_name` is used to - # update the transfer config. - # @param service_account_name [::String] - # Optional service account email. If this field is set, the transfer config - # will be created with this service account's credentials. It requires that - # the requesting user calling this API has permissions to act as this service - # account. - # - # Note that not all data sources support service account credentials when - # creating a transfer config. For the latest list of data sources, read about - # [using service - # accounts](https://cloud.google.com/bigquery-transfer/docs/use-service-accounts). - # - # @yield [response, operation] Access the result along with the RPC operation - # @yieldparam response [::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig] - # @yieldparam operation [::GRPC::ActiveCall::Operation] - # - # @return [::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig] - # - # @raise [::Google::Cloud::Error] if the RPC is aborted. - # - # @example Basic example - # require "google/cloud/bigquery/data_transfer/v1" - # - # # Create a client object. The client can be reused for multiple calls. - # client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new - # - # # Create a request. To set request fields, pass in keyword arguments. - # request = Google::Cloud::Bigquery::DataTransfer::V1::UpdateTransferConfigRequest.new - # - # # Call the update_transfer_config method. - # result = client.update_transfer_config request - # - # # The returned object is of type Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig. - # p result - # - def update_transfer_config request, options = nil - raise ::ArgumentError, "request must be provided" if request.nil? - - request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigquery::DataTransfer::V1::UpdateTransferConfigRequest - - # Converts hash and nil to an options object - options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h - - # Customize the options with defaults - metadata = @config.rpcs.update_transfer_config.metadata.to_h - - # Set x-goog-api-client, x-goog-user-project and x-goog-api-version headers - metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \ - lib_name: @config.lib_name, lib_version: @config.lib_version, - gapic_version: ::Google::Cloud::Bigquery::DataTransfer::V1::VERSION - metadata[:"x-goog-api-version"] = API_VERSION unless API_VERSION.empty? - metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id - - header_params = {} - if request.transfer_config&.name - header_params["transfer_config.name"] = request.transfer_config.name - end - - request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&") - metadata[:"x-goog-request-params"] ||= request_params_header - - options.apply_defaults timeout: @config.rpcs.update_transfer_config.timeout, - metadata: metadata, - retry_policy: @config.rpcs.update_transfer_config.retry_policy - - options.apply_defaults timeout: @config.timeout, - metadata: @config.metadata, - retry_policy: @config.retry_policy - - @data_transfer_service_stub.call_rpc :update_transfer_config, request, options: options do |response, operation| - yield response, operation if block_given? 
- return response - end - rescue ::GRPC::BadStatus => e - raise ::Google::Cloud::Error.from_error(e) - end - - ## - # Deletes a data transfer configuration, including any associated transfer - # runs and logs. - # - # @overload delete_transfer_config(request, options = nil) - # Pass arguments to `delete_transfer_config` via a request object, either of type - # {::Google::Cloud::Bigquery::DataTransfer::V1::DeleteTransferConfigRequest} or an equivalent Hash. - # - # @param request [::Google::Cloud::Bigquery::DataTransfer::V1::DeleteTransferConfigRequest, ::Hash] - # A request object representing the call parameters. Required. To specify no - # parameters, or to keep all the default parameter values, pass an empty Hash. - # @param options [::Gapic::CallOptions, ::Hash] - # Overrides the default settings for this call, e.g, timeout, retries, etc. Optional. - # - # @overload delete_transfer_config(name: nil) - # Pass arguments to `delete_transfer_config` via keyword arguments. Note that at - # least one keyword argument is required. To specify no parameters, or to keep all - # the default parameter values, pass an empty Hash as a request object (see above). - # - # @param name [::String] - # Required. The field will contain name of the resource requested, for - # example: `projects/{project_id}/transferConfigs/{config_id}` or - # `projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}` - # - # @yield [response, operation] Access the result along with the RPC operation - # @yieldparam response [::Google::Protobuf::Empty] - # @yieldparam operation [::GRPC::ActiveCall::Operation] - # - # @return [::Google::Protobuf::Empty] - # - # @raise [::Google::Cloud::Error] if the RPC is aborted. - # - # @example Basic example - # require "google/cloud/bigquery/data_transfer/v1" - # - # # Create a client object. The client can be reused for multiple calls. - # client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new - # - # # Create a request. To set request fields, pass in keyword arguments. - # request = Google::Cloud::Bigquery::DataTransfer::V1::DeleteTransferConfigRequest.new - # - # # Call the delete_transfer_config method. - # result = client.delete_transfer_config request - # - # # The returned object is of type Google::Protobuf::Empty. - # p result - # - def delete_transfer_config request, options = nil - raise ::ArgumentError, "request must be provided" if request.nil? - - request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigquery::DataTransfer::V1::DeleteTransferConfigRequest - - # Converts hash and nil to an options object - options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h - - # Customize the options with defaults - metadata = @config.rpcs.delete_transfer_config.metadata.to_h - - # Set x-goog-api-client, x-goog-user-project and x-goog-api-version headers - metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \ - lib_name: @config.lib_name, lib_version: @config.lib_version, - gapic_version: ::Google::Cloud::Bigquery::DataTransfer::V1::VERSION - metadata[:"x-goog-api-version"] = API_VERSION unless API_VERSION.empty? 
- metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id - - header_params = {} - if request.name - header_params["name"] = request.name - end - - request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&") - metadata[:"x-goog-request-params"] ||= request_params_header - - options.apply_defaults timeout: @config.rpcs.delete_transfer_config.timeout, - metadata: metadata, - retry_policy: @config.rpcs.delete_transfer_config.retry_policy - - options.apply_defaults timeout: @config.timeout, - metadata: @config.metadata, - retry_policy: @config.retry_policy - - @data_transfer_service_stub.call_rpc :delete_transfer_config, request, options: options do |response, operation| - yield response, operation if block_given? - return response - end - rescue ::GRPC::BadStatus => e - raise ::Google::Cloud::Error.from_error(e) - end - - ## - # Returns information about a data transfer config. - # - # @overload get_transfer_config(request, options = nil) - # Pass arguments to `get_transfer_config` via a request object, either of type - # {::Google::Cloud::Bigquery::DataTransfer::V1::GetTransferConfigRequest} or an equivalent Hash. - # - # @param request [::Google::Cloud::Bigquery::DataTransfer::V1::GetTransferConfigRequest, ::Hash] - # A request object representing the call parameters. Required. To specify no - # parameters, or to keep all the default parameter values, pass an empty Hash. - # @param options [::Gapic::CallOptions, ::Hash] - # Overrides the default settings for this call, e.g, timeout, retries, etc. Optional. - # - # @overload get_transfer_config(name: nil) - # Pass arguments to `get_transfer_config` via keyword arguments. Note that at - # least one keyword argument is required. To specify no parameters, or to keep all - # the default parameter values, pass an empty Hash as a request object (see above). - # - # @param name [::String] - # Required. The field will contain name of the resource requested, for - # example: `projects/{project_id}/transferConfigs/{config_id}` or - # `projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}` - # - # @yield [response, operation] Access the result along with the RPC operation - # @yieldparam response [::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig] - # @yieldparam operation [::GRPC::ActiveCall::Operation] - # - # @return [::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig] - # - # @raise [::Google::Cloud::Error] if the RPC is aborted. - # - # @example Basic example - # require "google/cloud/bigquery/data_transfer/v1" - # - # # Create a client object. The client can be reused for multiple calls. - # client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new - # - # # Create a request. To set request fields, pass in keyword arguments. - # request = Google::Cloud::Bigquery::DataTransfer::V1::GetTransferConfigRequest.new - # - # # Call the get_transfer_config method. - # result = client.get_transfer_config request - # - # # The returned object is of type Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig. - # p result - # - def get_transfer_config request, options = nil - raise ::ArgumentError, "request must be provided" if request.nil? - - request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigquery::DataTransfer::V1::GetTransferConfigRequest - - # Converts hash and nil to an options object - options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? 
:to_h - - # Customize the options with defaults - metadata = @config.rpcs.get_transfer_config.metadata.to_h - - # Set x-goog-api-client, x-goog-user-project and x-goog-api-version headers - metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \ - lib_name: @config.lib_name, lib_version: @config.lib_version, - gapic_version: ::Google::Cloud::Bigquery::DataTransfer::V1::VERSION - metadata[:"x-goog-api-version"] = API_VERSION unless API_VERSION.empty? - metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id - - header_params = {} - if request.name - header_params["name"] = request.name - end - - request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&") - metadata[:"x-goog-request-params"] ||= request_params_header - - options.apply_defaults timeout: @config.rpcs.get_transfer_config.timeout, - metadata: metadata, - retry_policy: @config.rpcs.get_transfer_config.retry_policy - - options.apply_defaults timeout: @config.timeout, - metadata: @config.metadata, - retry_policy: @config.retry_policy - - @data_transfer_service_stub.call_rpc :get_transfer_config, request, options: options do |response, operation| - yield response, operation if block_given? - return response - end - rescue ::GRPC::BadStatus => e - raise ::Google::Cloud::Error.from_error(e) - end - - ## - # Returns information about all transfer configs owned by a project in the - # specified location. - # - # @overload list_transfer_configs(request, options = nil) - # Pass arguments to `list_transfer_configs` via a request object, either of type - # {::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferConfigsRequest} or an equivalent Hash. - # - # @param request [::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferConfigsRequest, ::Hash] - # A request object representing the call parameters. Required. To specify no - # parameters, or to keep all the default parameter values, pass an empty Hash. - # @param options [::Gapic::CallOptions, ::Hash] - # Overrides the default settings for this call, e.g, timeout, retries, etc. Optional. - # - # @overload list_transfer_configs(parent: nil, data_source_ids: nil, page_token: nil, page_size: nil) - # Pass arguments to `list_transfer_configs` via keyword arguments. Note that at - # least one keyword argument is required. To specify no parameters, or to keep all - # the default parameter values, pass an empty Hash as a request object (see above). - # - # @param parent [::String] - # Required. The BigQuery project id for which transfer configs - # should be returned: `projects/{project_id}` or - # `projects/{project_id}/locations/{location_id}` - # @param data_source_ids [::Array<::String>] - # When specified, only configurations of requested data sources are returned. - # @param page_token [::String] - # Pagination token, which can be used to request a specific page - # of `ListTransfersRequest` list results. For multiple-page - # results, `ListTransfersResponse` outputs - # a `next_page` token, which can be used as the - # `page_token` value to request the next page of list results. - # @param page_size [::Integer] - # Page size. The default page size is the maximum value of 1000 results. 
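- #
- # As a hedged aside (the parent below is a placeholder), pages can also be
- # walked explicitly through the returned Gapic::PagedEnumerable:
- #
- #     client.list_transfer_configs(parent: "projects/my-project").each_page do |page|
- #       page.each { |config| p config.display_name }
- #     end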
- # - # @yield [response, operation] Access the result along with the RPC operation - # @yieldparam response [::Gapic::PagedEnumerable<::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig>] - # @yieldparam operation [::GRPC::ActiveCall::Operation] - # - # @return [::Gapic::PagedEnumerable<::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig>] - # - # @raise [::Google::Cloud::Error] if the RPC is aborted. - # - # @example Basic example - # require "google/cloud/bigquery/data_transfer/v1" - # - # # Create a client object. The client can be reused for multiple calls. - # client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new - # - # # Create a request. To set request fields, pass in keyword arguments. - # request = Google::Cloud::Bigquery::DataTransfer::V1::ListTransferConfigsRequest.new - # - # # Call the list_transfer_configs method. - # result = client.list_transfer_configs request - # - # # The returned object is of type Gapic::PagedEnumerable. You can iterate - # # over elements, and API calls will be issued to fetch pages as needed. - # result.each do |item| - # # Each element is of type ::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig. - # p item - # end - # - def list_transfer_configs request, options = nil - raise ::ArgumentError, "request must be provided" if request.nil? - - request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferConfigsRequest - - # Converts hash and nil to an options object - options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h - - # Customize the options with defaults - metadata = @config.rpcs.list_transfer_configs.metadata.to_h - - # Set x-goog-api-client, x-goog-user-project and x-goog-api-version headers - metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \ - lib_name: @config.lib_name, lib_version: @config.lib_version, - gapic_version: ::Google::Cloud::Bigquery::DataTransfer::V1::VERSION - metadata[:"x-goog-api-version"] = API_VERSION unless API_VERSION.empty? - metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id - - header_params = {} - if request.parent - header_params["parent"] = request.parent - end - - request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&") - metadata[:"x-goog-request-params"] ||= request_params_header - - options.apply_defaults timeout: @config.rpcs.list_transfer_configs.timeout, - metadata: metadata, - retry_policy: @config.rpcs.list_transfer_configs.retry_policy - - options.apply_defaults timeout: @config.timeout, - metadata: @config.metadata, - retry_policy: @config.retry_policy - - @data_transfer_service_stub.call_rpc :list_transfer_configs, request, options: options do |response, operation| - response = ::Gapic::PagedEnumerable.new @data_transfer_service_stub, :list_transfer_configs, request, response, operation, options - yield response, operation if block_given? - return response - end - rescue ::GRPC::BadStatus => e - raise ::Google::Cloud::Error.from_error(e) - end - - ## - # Creates transfer runs for a time range [start_time, end_time]. - # For each date - or whatever granularity the data source supports - in the - # range, one transfer run is created. - # Note that runs are created per UTC time in the time range. - # DEPRECATED: use StartManualTransferRuns instead. - # - # @deprecated This method is deprecated and may be removed in the next major version update. 
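- # As a hedged migration sketch (names are placeholders; the time-range
- # hash follows the start_manual_transfer_runs overload documented on that
- # method), an equivalent backfill request would be:
- #
- #     client.start_manual_transfer_runs(
- #       parent: "projects/my-project/transferConfigs/my-config",
- #       requested_time_range: { start_time: start_time, end_time: end_time } # past Timestamp hashes
- #     )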
- # - # @overload schedule_transfer_runs(request, options = nil) - # Pass arguments to `schedule_transfer_runs` via a request object, either of type - # {::Google::Cloud::Bigquery::DataTransfer::V1::ScheduleTransferRunsRequest} or an equivalent Hash. - # - # @param request [::Google::Cloud::Bigquery::DataTransfer::V1::ScheduleTransferRunsRequest, ::Hash] - # A request object representing the call parameters. Required. To specify no - # parameters, or to keep all the default parameter values, pass an empty Hash. - # @param options [::Gapic::CallOptions, ::Hash] - # Overrides the default settings for this call, e.g, timeout, retries, etc. Optional. - # - # @overload schedule_transfer_runs(parent: nil, start_time: nil, end_time: nil) - # Pass arguments to `schedule_transfer_runs` via keyword arguments. Note that at - # least one keyword argument is required. To specify no parameters, or to keep all - # the default parameter values, pass an empty Hash as a request object (see above). - # - # @param parent [::String] - # Required. Transfer configuration name in the form: - # `projects/{project_id}/transferConfigs/{config_id}` or - # `projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}`. - # @param start_time [::Google::Protobuf::Timestamp, ::Hash] - # Required. Start time of the range of transfer runs. For example, - # `"2017-05-25T00:00:00+00:00"`. - # @param end_time [::Google::Protobuf::Timestamp, ::Hash] - # Required. End time of the range of transfer runs. For example, - # `"2017-05-30T00:00:00+00:00"`. - # - # @yield [response, operation] Access the result along with the RPC operation - # @yieldparam response [::Google::Cloud::Bigquery::DataTransfer::V1::ScheduleTransferRunsResponse] - # @yieldparam operation [::GRPC::ActiveCall::Operation] - # - # @return [::Google::Cloud::Bigquery::DataTransfer::V1::ScheduleTransferRunsResponse] - # - # @raise [::Google::Cloud::Error] if the RPC is aborted. - # - # @example Basic example - # require "google/cloud/bigquery/data_transfer/v1" - # - # # Create a client object. The client can be reused for multiple calls. - # client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new - # - # # Create a request. To set request fields, pass in keyword arguments. - # request = Google::Cloud::Bigquery::DataTransfer::V1::ScheduleTransferRunsRequest.new - # - # # Call the schedule_transfer_runs method. - # result = client.schedule_transfer_runs request - # - # # The returned object is of type Google::Cloud::Bigquery::DataTransfer::V1::ScheduleTransferRunsResponse. - # p result - # - def schedule_transfer_runs request, options = nil - raise ::ArgumentError, "request must be provided" if request.nil? - - request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigquery::DataTransfer::V1::ScheduleTransferRunsRequest - - # Converts hash and nil to an options object - options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h - - # Customize the options with defaults - metadata = @config.rpcs.schedule_transfer_runs.metadata.to_h - - # Set x-goog-api-client, x-goog-user-project and x-goog-api-version headers - metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \ - lib_name: @config.lib_name, lib_version: @config.lib_version, - gapic_version: ::Google::Cloud::Bigquery::DataTransfer::V1::VERSION - metadata[:"x-goog-api-version"] = API_VERSION unless API_VERSION.empty? 
- metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id - - header_params = {} - if request.parent - header_params["parent"] = request.parent - end - - request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&") - metadata[:"x-goog-request-params"] ||= request_params_header - - options.apply_defaults timeout: @config.rpcs.schedule_transfer_runs.timeout, - metadata: metadata, - retry_policy: @config.rpcs.schedule_transfer_runs.retry_policy - - options.apply_defaults timeout: @config.timeout, - metadata: @config.metadata, - retry_policy: @config.retry_policy - - @data_transfer_service_stub.call_rpc :schedule_transfer_runs, request, options: options do |response, operation| - yield response, operation if block_given? - return response - end - rescue ::GRPC::BadStatus => e - raise ::Google::Cloud::Error.from_error(e) - end - - ## - # Start manual transfer runs to be executed now with schedule_time equal to - # current time. The transfer runs can be created for a time range where the - # run_time is between start_time (inclusive) and end_time (exclusive), or for - # a specific run_time. - # - # @overload start_manual_transfer_runs(request, options = nil) - # Pass arguments to `start_manual_transfer_runs` via a request object, either of type - # {::Google::Cloud::Bigquery::DataTransfer::V1::StartManualTransferRunsRequest} or an equivalent Hash. - # - # @param request [::Google::Cloud::Bigquery::DataTransfer::V1::StartManualTransferRunsRequest, ::Hash] - # A request object representing the call parameters. Required. To specify no - # parameters, or to keep all the default parameter values, pass an empty Hash. - # @param options [::Gapic::CallOptions, ::Hash] - # Overrides the default settings for this call, e.g, timeout, retries, etc. Optional. - # - # @overload start_manual_transfer_runs(parent: nil, requested_time_range: nil, requested_run_time: nil) - # Pass arguments to `start_manual_transfer_runs` via keyword arguments. Note that at - # least one keyword argument is required. To specify no parameters, or to keep all - # the default parameter values, pass an empty Hash as a request object (see above). - # - # @param parent [::String] - # Required. Transfer configuration name in the form: - # `projects/{project_id}/transferConfigs/{config_id}` or - # `projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}`. - # @param requested_time_range [::Google::Cloud::Bigquery::DataTransfer::V1::StartManualTransferRunsRequest::TimeRange, ::Hash] - # A time_range start and end timestamp for historical data files or reports - # that are scheduled to be transferred by the scheduled transfer run. - # requested_time_range must be a past time and cannot include future time - # values. - # @param requested_run_time [::Google::Protobuf::Timestamp, ::Hash] - # A run_time timestamp for historical data files or reports - # that are scheduled to be transferred by the scheduled transfer run. - # requested_run_time must be a past time and cannot include future time - # values. - # - # @yield [response, operation] Access the result along with the RPC operation - # @yieldparam response [::Google::Cloud::Bigquery::DataTransfer::V1::StartManualTransferRunsResponse] - # @yieldparam operation [::GRPC::ActiveCall::Operation] - # - # @return [::Google::Cloud::Bigquery::DataTransfer::V1::StartManualTransferRunsResponse] - # - # @raise [::Google::Cloud::Error] if the RPC is aborted. 
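- #
- # A keyword-argument sketch (hedged: the config name is a placeholder and
- # the Timestamp hash uses the protobuf seconds field):
- #
- #     an_hour_ago = { seconds: Time.now.to_i - 3600 }
- #     client.start_manual_transfer_runs(
- #       parent: "projects/my-project/transferConfigs/my-config",
- #       requested_run_time: an_hour_ago
- #     )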
- # - # @example Basic example - # require "google/cloud/bigquery/data_transfer/v1" - # - # # Create a client object. The client can be reused for multiple calls. - # client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new - # - # # Create a request. To set request fields, pass in keyword arguments. - # request = Google::Cloud::Bigquery::DataTransfer::V1::StartManualTransferRunsRequest.new - # - # # Call the start_manual_transfer_runs method. - # result = client.start_manual_transfer_runs request - # - # # The returned object is of type Google::Cloud::Bigquery::DataTransfer::V1::StartManualTransferRunsResponse. - # p result - # - def start_manual_transfer_runs request, options = nil - raise ::ArgumentError, "request must be provided" if request.nil? - - request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigquery::DataTransfer::V1::StartManualTransferRunsRequest - - # Converts hash and nil to an options object - options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h - - # Customize the options with defaults - metadata = @config.rpcs.start_manual_transfer_runs.metadata.to_h - - # Set x-goog-api-client, x-goog-user-project and x-goog-api-version headers - metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \ - lib_name: @config.lib_name, lib_version: @config.lib_version, - gapic_version: ::Google::Cloud::Bigquery::DataTransfer::V1::VERSION - metadata[:"x-goog-api-version"] = API_VERSION unless API_VERSION.empty? - metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id - - header_params = {} - if request.parent - header_params["parent"] = request.parent - end - - request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&") - metadata[:"x-goog-request-params"] ||= request_params_header - - options.apply_defaults timeout: @config.rpcs.start_manual_transfer_runs.timeout, - metadata: metadata, - retry_policy: @config.rpcs.start_manual_transfer_runs.retry_policy - - options.apply_defaults timeout: @config.timeout, - metadata: @config.metadata, - retry_policy: @config.retry_policy - - @data_transfer_service_stub.call_rpc :start_manual_transfer_runs, request, options: options do |response, operation| - yield response, operation if block_given? - return response - end - rescue ::GRPC::BadStatus => e - raise ::Google::Cloud::Error.from_error(e) - end - - ## - # Returns information about the particular transfer run. - # - # @overload get_transfer_run(request, options = nil) - # Pass arguments to `get_transfer_run` via a request object, either of type - # {::Google::Cloud::Bigquery::DataTransfer::V1::GetTransferRunRequest} or an equivalent Hash. - # - # @param request [::Google::Cloud::Bigquery::DataTransfer::V1::GetTransferRunRequest, ::Hash] - # A request object representing the call parameters. Required. To specify no - # parameters, or to keep all the default parameter values, pass an empty Hash. - # @param options [::Gapic::CallOptions, ::Hash] - # Overrides the default settings for this call, e.g, timeout, retries, etc. Optional. - # - # @overload get_transfer_run(name: nil) - # Pass arguments to `get_transfer_run` via keyword arguments. Note that at - # least one keyword argument is required. To specify no parameters, or to keep all - # the default parameter values, pass an empty Hash as a request object (see above). - # - # @param name [::String] - # Required. 
The field will contain name of the resource requested, for - # example: `projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}` - # or - # `projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}/runs/{run_id}` - # - # @yield [response, operation] Access the result along with the RPC operation - # @yieldparam response [::Google::Cloud::Bigquery::DataTransfer::V1::TransferRun] - # @yieldparam operation [::GRPC::ActiveCall::Operation] - # - # @return [::Google::Cloud::Bigquery::DataTransfer::V1::TransferRun] - # - # @raise [::Google::Cloud::Error] if the RPC is aborted. - # - # @example Basic example - # require "google/cloud/bigquery/data_transfer/v1" - # - # # Create a client object. The client can be reused for multiple calls. - # client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new - # - # # Create a request. To set request fields, pass in keyword arguments. - # request = Google::Cloud::Bigquery::DataTransfer::V1::GetTransferRunRequest.new - # - # # Call the get_transfer_run method. - # result = client.get_transfer_run request - # - # # The returned object is of type Google::Cloud::Bigquery::DataTransfer::V1::TransferRun. - # p result - # - def get_transfer_run request, options = nil - raise ::ArgumentError, "request must be provided" if request.nil? - - request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigquery::DataTransfer::V1::GetTransferRunRequest - - # Converts hash and nil to an options object - options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h - - # Customize the options with defaults - metadata = @config.rpcs.get_transfer_run.metadata.to_h - - # Set x-goog-api-client, x-goog-user-project and x-goog-api-version headers - metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \ - lib_name: @config.lib_name, lib_version: @config.lib_version, - gapic_version: ::Google::Cloud::Bigquery::DataTransfer::V1::VERSION - metadata[:"x-goog-api-version"] = API_VERSION unless API_VERSION.empty? - metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id - - header_params = {} - if request.name - header_params["name"] = request.name - end - - request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&") - metadata[:"x-goog-request-params"] ||= request_params_header - - options.apply_defaults timeout: @config.rpcs.get_transfer_run.timeout, - metadata: metadata, - retry_policy: @config.rpcs.get_transfer_run.retry_policy - - options.apply_defaults timeout: @config.timeout, - metadata: @config.metadata, - retry_policy: @config.retry_policy - - @data_transfer_service_stub.call_rpc :get_transfer_run, request, options: options do |response, operation| - yield response, operation if block_given? - return response - end - rescue ::GRPC::BadStatus => e - raise ::Google::Cloud::Error.from_error(e) - end - - ## - # Deletes the specified transfer run. - # - # @overload delete_transfer_run(request, options = nil) - # Pass arguments to `delete_transfer_run` via a request object, either of type - # {::Google::Cloud::Bigquery::DataTransfer::V1::DeleteTransferRunRequest} or an equivalent Hash. - # - # @param request [::Google::Cloud::Bigquery::DataTransfer::V1::DeleteTransferRunRequest, ::Hash] - # A request object representing the call parameters. Required. To specify no - # parameters, or to keep all the default parameter values, pass an empty Hash. 
- # @param options [::Gapic::CallOptions, ::Hash] - # Overrides the default settings for this call, e.g, timeout, retries, etc. Optional. - # - # @overload delete_transfer_run(name: nil) - # Pass arguments to `delete_transfer_run` via keyword arguments. Note that at - # least one keyword argument is required. To specify no parameters, or to keep all - # the default parameter values, pass an empty Hash as a request object (see above). - # - # @param name [::String] - # Required. The field will contain name of the resource requested, for - # example: `projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}` - # or - # `projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}/runs/{run_id}` - # - # @yield [response, operation] Access the result along with the RPC operation - # @yieldparam response [::Google::Protobuf::Empty] - # @yieldparam operation [::GRPC::ActiveCall::Operation] - # - # @return [::Google::Protobuf::Empty] - # - # @raise [::Google::Cloud::Error] if the RPC is aborted. - # - # @example Basic example - # require "google/cloud/bigquery/data_transfer/v1" - # - # # Create a client object. The client can be reused for multiple calls. - # client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new - # - # # Create a request. To set request fields, pass in keyword arguments. - # request = Google::Cloud::Bigquery::DataTransfer::V1::DeleteTransferRunRequest.new - # - # # Call the delete_transfer_run method. - # result = client.delete_transfer_run request - # - # # The returned object is of type Google::Protobuf::Empty. - # p result - # - def delete_transfer_run request, options = nil - raise ::ArgumentError, "request must be provided" if request.nil? - - request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigquery::DataTransfer::V1::DeleteTransferRunRequest - - # Converts hash and nil to an options object - options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h - - # Customize the options with defaults - metadata = @config.rpcs.delete_transfer_run.metadata.to_h - - # Set x-goog-api-client, x-goog-user-project and x-goog-api-version headers - metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \ - lib_name: @config.lib_name, lib_version: @config.lib_version, - gapic_version: ::Google::Cloud::Bigquery::DataTransfer::V1::VERSION - metadata[:"x-goog-api-version"] = API_VERSION unless API_VERSION.empty? - metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id - - header_params = {} - if request.name - header_params["name"] = request.name - end - - request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&") - metadata[:"x-goog-request-params"] ||= request_params_header - - options.apply_defaults timeout: @config.rpcs.delete_transfer_run.timeout, - metadata: metadata, - retry_policy: @config.rpcs.delete_transfer_run.retry_policy - - options.apply_defaults timeout: @config.timeout, - metadata: @config.metadata, - retry_policy: @config.retry_policy - - @data_transfer_service_stub.call_rpc :delete_transfer_run, request, options: options do |response, operation| - yield response, operation if block_given? - return response - end - rescue ::GRPC::BadStatus => e - raise ::Google::Cloud::Error.from_error(e) - end - - ## - # Returns information about running and completed transfer runs. 
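- #
- # For example (a sketch; the parent is a placeholder), failed runs can be
- # isolated through the states filter documented below:
- #
- #     runs = client.list_transfer_runs(
- #       parent: "projects/my-project/transferConfigs/my-config",
- #       states: [:FAILED]
- #     )
- #     runs.each { |run| p run.name }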
- # - # @overload list_transfer_runs(request, options = nil) - # Pass arguments to `list_transfer_runs` via a request object, either of type - # {::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferRunsRequest} or an equivalent Hash. - # - # @param request [::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferRunsRequest, ::Hash] - # A request object representing the call parameters. Required. To specify no - # parameters, or to keep all the default parameter values, pass an empty Hash. - # @param options [::Gapic::CallOptions, ::Hash] - # Overrides the default settings for this call, e.g, timeout, retries, etc. Optional. - # - # @overload list_transfer_runs(parent: nil, states: nil, page_token: nil, page_size: nil, run_attempt: nil) - # Pass arguments to `list_transfer_runs` via keyword arguments. Note that at - # least one keyword argument is required. To specify no parameters, or to keep all - # the default parameter values, pass an empty Hash as a request object (see above). - # - # @param parent [::String] - # Required. Name of transfer configuration for which transfer runs should be - # retrieved. Format of transfer configuration resource name is: - # `projects/{project_id}/transferConfigs/{config_id}` or - # `projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}`. - # @param states [::Array<::Google::Cloud::Bigquery::DataTransfer::V1::TransferState>] - # When specified, only transfer runs with requested states are returned. - # @param page_token [::String] - # Pagination token, which can be used to request a specific page - # of `ListTransferRunsRequest` list results. For multiple-page - # results, `ListTransferRunsResponse` outputs - # a `next_page` token, which can be used as the - # `page_token` value to request the next page of list results. - # @param page_size [::Integer] - # Page size. The default page size is the maximum value of 1000 results. - # @param run_attempt [::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferRunsRequest::RunAttempt] - # Indicates how run attempts are to be pulled. - # - # @yield [response, operation] Access the result along with the RPC operation - # @yieldparam response [::Gapic::PagedEnumerable<::Google::Cloud::Bigquery::DataTransfer::V1::TransferRun>] - # @yieldparam operation [::GRPC::ActiveCall::Operation] - # - # @return [::Gapic::PagedEnumerable<::Google::Cloud::Bigquery::DataTransfer::V1::TransferRun>] - # - # @raise [::Google::Cloud::Error] if the RPC is aborted. - # - # @example Basic example - # require "google/cloud/bigquery/data_transfer/v1" - # - # # Create a client object. The client can be reused for multiple calls. - # client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new - # - # # Create a request. To set request fields, pass in keyword arguments. - # request = Google::Cloud::Bigquery::DataTransfer::V1::ListTransferRunsRequest.new - # - # # Call the list_transfer_runs method. - # result = client.list_transfer_runs request - # - # # The returned object is of type Gapic::PagedEnumerable. You can iterate - # # over elements, and API calls will be issued to fetch pages as needed. - # result.each do |item| - # # Each element is of type ::Google::Cloud::Bigquery::DataTransfer::V1::TransferRun. - # p item - # end - # - def list_transfer_runs request, options = nil - raise ::ArgumentError, "request must be provided" if request.nil? 
- - request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferRunsRequest - - # Converts hash and nil to an options object - options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h - - # Customize the options with defaults - metadata = @config.rpcs.list_transfer_runs.metadata.to_h - - # Set x-goog-api-client, x-goog-user-project and x-goog-api-version headers - metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \ - lib_name: @config.lib_name, lib_version: @config.lib_version, - gapic_version: ::Google::Cloud::Bigquery::DataTransfer::V1::VERSION - metadata[:"x-goog-api-version"] = API_VERSION unless API_VERSION.empty? - metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id - - header_params = {} - if request.parent - header_params["parent"] = request.parent - end - - request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&") - metadata[:"x-goog-request-params"] ||= request_params_header - - options.apply_defaults timeout: @config.rpcs.list_transfer_runs.timeout, - metadata: metadata, - retry_policy: @config.rpcs.list_transfer_runs.retry_policy - - options.apply_defaults timeout: @config.timeout, - metadata: @config.metadata, - retry_policy: @config.retry_policy - - @data_transfer_service_stub.call_rpc :list_transfer_runs, request, options: options do |response, operation| - response = ::Gapic::PagedEnumerable.new @data_transfer_service_stub, :list_transfer_runs, request, response, operation, options - yield response, operation if block_given? - return response - end - rescue ::GRPC::BadStatus => e - raise ::Google::Cloud::Error.from_error(e) - end - - ## - # Returns log messages for the transfer run. - # - # @overload list_transfer_logs(request, options = nil) - # Pass arguments to `list_transfer_logs` via a request object, either of type - # {::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferLogsRequest} or an equivalent Hash. - # - # @param request [::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferLogsRequest, ::Hash] - # A request object representing the call parameters. Required. To specify no - # parameters, or to keep all the default parameter values, pass an empty Hash. - # @param options [::Gapic::CallOptions, ::Hash] - # Overrides the default settings for this call, e.g, timeout, retries, etc. Optional. - # - # @overload list_transfer_logs(parent: nil, page_token: nil, page_size: nil, message_types: nil) - # Pass arguments to `list_transfer_logs` via keyword arguments. Note that at - # least one keyword argument is required. To specify no parameters, or to keep all - # the default parameter values, pass an empty Hash as a request object (see above). - # - # @param parent [::String] - # Required. Transfer run name in the form: - # `projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}` or - # `projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}/runs/{run_id}` - # @param page_token [::String] - # Pagination token, which can be used to request a specific page - # of `ListTransferLogsRequest` list results. For multiple-page - # results, `ListTransferLogsResponse` outputs - # a `next_page` token, which can be used as the - # `page_token` value to request the next page of list results. - # @param page_size [::Integer] - # Page size. The default page size is the maximum value of 1000 results. 
- # @param message_types [::Array<::Google::Cloud::Bigquery::DataTransfer::V1::TransferMessage::MessageSeverity>] - # Message types to return. If not populated - INFO, WARNING and ERROR - # messages are returned. - # - # @yield [response, operation] Access the result along with the RPC operation - # @yieldparam response [::Gapic::PagedEnumerable<::Google::Cloud::Bigquery::DataTransfer::V1::TransferMessage>] - # @yieldparam operation [::GRPC::ActiveCall::Operation] - # - # @return [::Gapic::PagedEnumerable<::Google::Cloud::Bigquery::DataTransfer::V1::TransferMessage>] - # - # @raise [::Google::Cloud::Error] if the RPC is aborted. - # - # @example Basic example - # require "google/cloud/bigquery/data_transfer/v1" - # - # # Create a client object. The client can be reused for multiple calls. - # client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new - # - # # Create a request. To set request fields, pass in keyword arguments. - # request = Google::Cloud::Bigquery::DataTransfer::V1::ListTransferLogsRequest.new - # - # # Call the list_transfer_logs method. - # result = client.list_transfer_logs request - # - # # The returned object is of type Gapic::PagedEnumerable. You can iterate - # # over elements, and API calls will be issued to fetch pages as needed. - # result.each do |item| - # # Each element is of type ::Google::Cloud::Bigquery::DataTransfer::V1::TransferMessage. - # p item - # end - # - def list_transfer_logs request, options = nil - raise ::ArgumentError, "request must be provided" if request.nil? - - request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferLogsRequest - - # Converts hash and nil to an options object - options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h - - # Customize the options with defaults - metadata = @config.rpcs.list_transfer_logs.metadata.to_h - - # Set x-goog-api-client, x-goog-user-project and x-goog-api-version headers - metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \ - lib_name: @config.lib_name, lib_version: @config.lib_version, - gapic_version: ::Google::Cloud::Bigquery::DataTransfer::V1::VERSION - metadata[:"x-goog-api-version"] = API_VERSION unless API_VERSION.empty? - metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id - - header_params = {} - if request.parent - header_params["parent"] = request.parent - end - - request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&") - metadata[:"x-goog-request-params"] ||= request_params_header - - options.apply_defaults timeout: @config.rpcs.list_transfer_logs.timeout, - metadata: metadata, - retry_policy: @config.rpcs.list_transfer_logs.retry_policy - - options.apply_defaults timeout: @config.timeout, - metadata: @config.metadata, - retry_policy: @config.retry_policy - - @data_transfer_service_stub.call_rpc :list_transfer_logs, request, options: options do |response, operation| - response = ::Gapic::PagedEnumerable.new @data_transfer_service_stub, :list_transfer_logs, request, response, operation, options - yield response, operation if block_given? - return response - end - rescue ::GRPC::BadStatus => e - raise ::Google::Cloud::Error.from_error(e) - end - - ## - # Returns true if valid credentials exist for the given data source and - # requesting user. 
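- #
- # A minimal sketch (the data source name is a placeholder; the response
- # field name is assumed from the CheckValidCredsResponse message):
- #
- #     response = client.check_valid_creds(
- #       name: "projects/my-project/dataSources/my_data_source"
- #     )
- #     puts response.has_valid_creds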
- # - # @overload check_valid_creds(request, options = nil) - # Pass arguments to `check_valid_creds` via a request object, either of type - # {::Google::Cloud::Bigquery::DataTransfer::V1::CheckValidCredsRequest} or an equivalent Hash. - # - # @param request [::Google::Cloud::Bigquery::DataTransfer::V1::CheckValidCredsRequest, ::Hash] - # A request object representing the call parameters. Required. To specify no - # parameters, or to keep all the default parameter values, pass an empty Hash. - # @param options [::Gapic::CallOptions, ::Hash] - # Overrides the default settings for this call, e.g, timeout, retries, etc. Optional. - # - # @overload check_valid_creds(name: nil) - # Pass arguments to `check_valid_creds` via keyword arguments. Note that at - # least one keyword argument is required. To specify no parameters, or to keep all - # the default parameter values, pass an empty Hash as a request object (see above). - # - # @param name [::String] - # Required. The data source in the form: - # `projects/{project_id}/dataSources/{data_source_id}` or - # `projects/{project_id}/locations/{location_id}/dataSources/{data_source_id}`. - # - # @yield [response, operation] Access the result along with the RPC operation - # @yieldparam response [::Google::Cloud::Bigquery::DataTransfer::V1::CheckValidCredsResponse] - # @yieldparam operation [::GRPC::ActiveCall::Operation] - # - # @return [::Google::Cloud::Bigquery::DataTransfer::V1::CheckValidCredsResponse] - # - # @raise [::Google::Cloud::Error] if the RPC is aborted. - # - # @example Basic example - # require "google/cloud/bigquery/data_transfer/v1" - # - # # Create a client object. The client can be reused for multiple calls. - # client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new - # - # # Create a request. To set request fields, pass in keyword arguments. - # request = Google::Cloud::Bigquery::DataTransfer::V1::CheckValidCredsRequest.new - # - # # Call the check_valid_creds method. - # result = client.check_valid_creds request - # - # # The returned object is of type Google::Cloud::Bigquery::DataTransfer::V1::CheckValidCredsResponse. - # p result - # - def check_valid_creds request, options = nil - raise ::ArgumentError, "request must be provided" if request.nil? - - request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigquery::DataTransfer::V1::CheckValidCredsRequest - - # Converts hash and nil to an options object - options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h - - # Customize the options with defaults - metadata = @config.rpcs.check_valid_creds.metadata.to_h - - # Set x-goog-api-client, x-goog-user-project and x-goog-api-version headers - metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \ - lib_name: @config.lib_name, lib_version: @config.lib_version, - gapic_version: ::Google::Cloud::Bigquery::DataTransfer::V1::VERSION - metadata[:"x-goog-api-version"] = API_VERSION unless API_VERSION.empty? 
- metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id - - header_params = {} - if request.name - header_params["name"] = request.name - end - - request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&") - metadata[:"x-goog-request-params"] ||= request_params_header - - options.apply_defaults timeout: @config.rpcs.check_valid_creds.timeout, - metadata: metadata, - retry_policy: @config.rpcs.check_valid_creds.retry_policy - - options.apply_defaults timeout: @config.timeout, - metadata: @config.metadata, - retry_policy: @config.retry_policy - - @data_transfer_service_stub.call_rpc :check_valid_creds, request, options: options do |response, operation| - yield response, operation if block_given? - return response - end - rescue ::GRPC::BadStatus => e - raise ::Google::Cloud::Error.from_error(e) - end - - ## - # Enroll data sources in a user project. This allows users to create transfer - # configurations for these data sources. They will also appear in the - # ListDataSources RPC and as such, will appear in the - # [BigQuery UI](https://console.cloud.google.com/bigquery), and the documents - # can be found in the public guide for - # [BigQuery Web UI](https://cloud.google.com/bigquery/bigquery-web-ui) and - # [Data Transfer - # Service](https://cloud.google.com/bigquery/docs/working-with-transfers). - # - # @overload enroll_data_sources(request, options = nil) - # Pass arguments to `enroll_data_sources` via a request object, either of type - # {::Google::Cloud::Bigquery::DataTransfer::V1::EnrollDataSourcesRequest} or an equivalent Hash. - # - # @param request [::Google::Cloud::Bigquery::DataTransfer::V1::EnrollDataSourcesRequest, ::Hash] - # A request object representing the call parameters. Required. To specify no - # parameters, or to keep all the default parameter values, pass an empty Hash. - # @param options [::Gapic::CallOptions, ::Hash] - # Overrides the default settings for this call, e.g, timeout, retries, etc. Optional. - # - # @overload enroll_data_sources(name: nil, data_source_ids: nil) - # Pass arguments to `enroll_data_sources` via keyword arguments. Note that at - # least one keyword argument is required. To specify no parameters, or to keep all - # the default parameter values, pass an empty Hash as a request object (see above). - # - # @param name [::String] - # Required. The name of the project resource in the form: - # `projects/{project_id}` - # @param data_source_ids [::Array<::String>] - # Data sources that are enrolled. It is required to provide at least one - # data source id. - # - # @yield [response, operation] Access the result along with the RPC operation - # @yieldparam response [::Google::Protobuf::Empty] - # @yieldparam operation [::GRPC::ActiveCall::Operation] - # - # @return [::Google::Protobuf::Empty] - # - # @raise [::Google::Cloud::Error] if the RPC is aborted. - # - # @example Basic example - # require "google/cloud/bigquery/data_transfer/v1" - # - # # Create a client object. The client can be reused for multiple calls. - # client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new - # - # # Create a request. To set request fields, pass in keyword arguments. - # request = Google::Cloud::Bigquery::DataTransfer::V1::EnrollDataSourcesRequest.new - # - # # Call the enroll_data_sources method. - # result = client.enroll_data_sources request - # - # # The returned object is of type Google::Protobuf::Empty. 
- # p result - # - def enroll_data_sources request, options = nil - raise ::ArgumentError, "request must be provided" if request.nil? - - request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigquery::DataTransfer::V1::EnrollDataSourcesRequest - - # Converts hash and nil to an options object - options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h - - # Customize the options with defaults - metadata = @config.rpcs.enroll_data_sources.metadata.to_h - - # Set x-goog-api-client, x-goog-user-project and x-goog-api-version headers - metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \ - lib_name: @config.lib_name, lib_version: @config.lib_version, - gapic_version: ::Google::Cloud::Bigquery::DataTransfer::V1::VERSION - metadata[:"x-goog-api-version"] = API_VERSION unless API_VERSION.empty? - metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id - - header_params = {} - if request.name - header_params["name"] = request.name - end - - request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&") - metadata[:"x-goog-request-params"] ||= request_params_header - - options.apply_defaults timeout: @config.rpcs.enroll_data_sources.timeout, - metadata: metadata, - retry_policy: @config.rpcs.enroll_data_sources.retry_policy - - options.apply_defaults timeout: @config.timeout, - metadata: @config.metadata, - retry_policy: @config.retry_policy - - @data_transfer_service_stub.call_rpc :enroll_data_sources, request, options: options do |response, operation| - yield response, operation if block_given? - return response - end - rescue ::GRPC::BadStatus => e - raise ::Google::Cloud::Error.from_error(e) - end - - ## - # Unenroll data sources in a user project. This allows users to remove - # transfer configurations for these data sources. They will no longer appear - # in the ListDataSources RPC and will also no longer appear in the [BigQuery - # UI](https://console.cloud.google.com/bigquery). Data transfers - # configurations of unenrolled data sources will not be scheduled. - # - # @overload unenroll_data_sources(request, options = nil) - # Pass arguments to `unenroll_data_sources` via a request object, either of type - # {::Google::Cloud::Bigquery::DataTransfer::V1::UnenrollDataSourcesRequest} or an equivalent Hash. - # - # @param request [::Google::Cloud::Bigquery::DataTransfer::V1::UnenrollDataSourcesRequest, ::Hash] - # A request object representing the call parameters. Required. To specify no - # parameters, or to keep all the default parameter values, pass an empty Hash. - # @param options [::Gapic::CallOptions, ::Hash] - # Overrides the default settings for this call, e.g, timeout, retries, etc. Optional. - # - # @overload unenroll_data_sources(name: nil, data_source_ids: nil) - # Pass arguments to `unenroll_data_sources` via keyword arguments. Note that at - # least one keyword argument is required. To specify no parameters, or to keep all - # the default parameter values, pass an empty Hash as a request object (see above). - # - # @param name [::String] - # Required. The name of the project resource in the form: - # `projects/{project_id}` - # @param data_source_ids [::Array<::String>] - # Data sources that are unenrolled. It is required to provide at least one - # data source id. 
- # - # @yield [response, operation] Access the result along with the RPC operation - # @yieldparam response [::Google::Protobuf::Empty] - # @yieldparam operation [::GRPC::ActiveCall::Operation] - # - # @return [::Google::Protobuf::Empty] - # - # @raise [::Google::Cloud::Error] if the RPC is aborted. - # - # @example Basic example - # require "google/cloud/bigquery/data_transfer/v1" - # - # # Create a client object. The client can be reused for multiple calls. - # client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new - # - # # Create a request. To set request fields, pass in keyword arguments. - # request = Google::Cloud::Bigquery::DataTransfer::V1::UnenrollDataSourcesRequest.new - # - # # Call the unenroll_data_sources method. - # result = client.unenroll_data_sources request - # - # # The returned object is of type Google::Protobuf::Empty. - # p result - # - def unenroll_data_sources request, options = nil - raise ::ArgumentError, "request must be provided" if request.nil? - - request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigquery::DataTransfer::V1::UnenrollDataSourcesRequest - - # Converts hash and nil to an options object - options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h - - # Customize the options with defaults - metadata = @config.rpcs.unenroll_data_sources.metadata.to_h - - # Set x-goog-api-client, x-goog-user-project and x-goog-api-version headers - metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \ - lib_name: @config.lib_name, lib_version: @config.lib_version, - gapic_version: ::Google::Cloud::Bigquery::DataTransfer::V1::VERSION - metadata[:"x-goog-api-version"] = API_VERSION unless API_VERSION.empty? - metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id - - header_params = {} - if request.name - header_params["name"] = request.name - end - - request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&") - metadata[:"x-goog-request-params"] ||= request_params_header - - options.apply_defaults timeout: @config.rpcs.unenroll_data_sources.timeout, - metadata: metadata, - retry_policy: @config.rpcs.unenroll_data_sources.retry_policy - - options.apply_defaults timeout: @config.timeout, - metadata: @config.metadata, - retry_policy: @config.retry_policy - - @data_transfer_service_stub.call_rpc :unenroll_data_sources, request, options: options do |response, operation| - yield response, operation if block_given? - return response - end - rescue ::GRPC::BadStatus => e - raise ::Google::Cloud::Error.from_error(e) - end - - ## - # Configuration class for the DataTransferService API. - # - # This class represents the configuration for DataTransferService, - # providing control over timeouts, retry behavior, logging, transport - # parameters, and other low-level controls. Certain parameters can also be - # applied individually to specific RPCs. See - # {::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client::Configuration::Rpcs} - # for a list of RPCs that can be configured independently. - # - # Configuration can be applied globally to all clients, or to a single client - # on construction. - # - # @example - # - # # Modify the global config, setting the timeout for - # # get_data_source to 20 seconds, - # # and all remaining timeouts to 10 seconds. 
- # ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.configure do |config|
- # config.timeout = 10.0
- # config.rpcs.get_data_source.timeout = 20.0
- # end
- #
- # # Apply the above configuration only to a new client.
- # client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new do |config|
- # config.timeout = 10.0
- # config.rpcs.get_data_source.timeout = 20.0
- # end
- #
- # @!attribute [rw] endpoint
- # A custom service endpoint, as a hostname or hostname:port. The default is
- # nil, indicating to use the default endpoint in the current universe domain.
- # @return [::String,nil]
- # @!attribute [rw] credentials
- # Credentials to send with calls. You may provide any of the following types:
- # * (`String`) The path to a service account key file in JSON format
- # * (`Hash`) A service account key as a Hash
- # * (`Google::Auth::Credentials`) A googleauth credentials object
- # (see the [googleauth docs](https://rubydoc.info/gems/googleauth/Google/Auth/Credentials))
- # * (`Signet::OAuth2::Client`) A signet oauth2 client object
- # (see the [signet docs](https://rubydoc.info/gems/signet/Signet/OAuth2/Client))
- # * (`GRPC::Core::Channel`) a gRPC channel with included credentials
- # * (`GRPC::Core::ChannelCredentials`) a gRPC credentials object
- # * (`nil`) indicating no credentials
- # @return [::Object]
- # @!attribute [rw] scope
- # The OAuth scopes
- # @return [::Array<::String>]
- # @!attribute [rw] lib_name
- # The library name as recorded in instrumentation and logging
- # @return [::String]
- # @!attribute [rw] lib_version
- # The library version as recorded in instrumentation and logging
- # @return [::String]
- # @!attribute [rw] channel_args
- # Extra parameters passed to the gRPC channel. Note: this is ignored if a
- # `GRPC::Core::Channel` object is provided as the credential.
- # @return [::Hash]
- # @!attribute [rw] interceptors
- # An array of interceptors that are run before calls are executed.
- # @return [::Array<::GRPC::ClientInterceptor>]
- # @!attribute [rw] timeout
- # The call timeout in seconds.
- # @return [::Numeric]
- # @!attribute [rw] metadata
- # Additional gRPC headers to be sent with the call.
- # @return [::Hash{::Symbol=>::String}]
- # @!attribute [rw] retry_policy
- # The retry policy. The value is a hash with the following keys:
- # * `:initial_delay` (*type:* `Numeric`) - The initial delay in seconds.
- # * `:max_delay` (*type:* `Numeric`) - The max delay in seconds.
- # * `:multiplier` (*type:* `Numeric`) - The incremental backoff multiplier.
- # * `:retry_codes` (*type:* `Array`) - The error codes that should
- # trigger a retry.
- # @return [::Hash]
- # @!attribute [rw] quota_project
- # A separate project against which to charge quota.
- # @return [::String]
- # @!attribute [rw] universe_domain
- # The universe domain within which to make requests. This determines the
- # default endpoint URL. The default value of nil uses the environment
- # universe (usually the default "googleapis.com" universe).
- # @return [::String,nil]
- #
- class Configuration
- extend ::Gapic::Config
-
- # @private
- # The endpoint specific to the default "googleapis.com" universe. Deprecated.
- DEFAULT_ENDPOINT = "bigquerydatatransfer.googleapis.com"
-
- config_attr :endpoint, nil, ::String, nil
- config_attr :credentials, nil do |value|
- allowed = [::String, ::Hash, ::Proc, ::Symbol, ::Google::Auth::Credentials, ::Signet::OAuth2::Client, nil]
- allowed += [::GRPC::Core::Channel, ::GRPC::Core::ChannelCredentials] if defined? ::GRPC
- allowed.any? { |klass| klass === value }
- end
- config_attr :scope, nil, ::String, ::Array, nil
- config_attr :lib_name, nil, ::String, nil
- config_attr :lib_version, nil, ::String, nil
- config_attr(:channel_args, { "grpc.service_config_disable_resolution" => 1 }, ::Hash, nil)
- config_attr :interceptors, nil, ::Array, nil
- config_attr :timeout, nil, ::Numeric, nil
- config_attr :metadata, nil, ::Hash, nil
- config_attr :retry_policy, nil, ::Hash, ::Proc, nil
- config_attr :quota_project, nil, ::String, nil
- config_attr :universe_domain, nil, ::String, nil
-
- # @private
- def initialize parent_config = nil
- @parent_config = parent_config unless parent_config.nil?
-
- yield self if block_given?
- end
-
- ##
- # Configurations for individual RPCs
- # @return [Rpcs]
- #
- def rpcs
- @rpcs ||= begin
- parent_rpcs = nil
- parent_rpcs = @parent_config.rpcs if defined?(@parent_config) && @parent_config.respond_to?(:rpcs)
- Rpcs.new parent_rpcs
- end
- end
-
- ##
- # Configuration for the channel pool
- # @return [::Gapic::ServiceStub::ChannelPool::Configuration]
- #
- def channel_pool
- @channel_pool ||= ::Gapic::ServiceStub::ChannelPool::Configuration.new
- end
-
- ##
- # Configuration RPC class for the DataTransferService API.
- #
- # Includes fields providing the configuration for each RPC in this service.
- # Each configuration object is of type `Gapic::Config::Method` and includes
- # the following configuration fields:
- #
- # * `timeout` (*type:* `Numeric`) - The call timeout in seconds
- # * `metadata` (*type:* `Hash{Symbol=>String}`) - Additional gRPC headers
- # * `retry_policy` (*type:* `Hash`) - The retry policy. The policy fields
- # include the following keys:
- # * `:initial_delay` (*type:* `Numeric`) - The initial delay in seconds.
- # * `:max_delay` (*type:* `Numeric`) - The max delay in seconds.
- # * `:multiplier` (*type:* `Numeric`) - The incremental backoff multiplier.
- # * `:retry_codes` (*type:* `Array`) - The error codes that should
- # trigger a retry.
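To make the configuration fields documented above concrete, here is a brief sketch (illustrative only: the timeout and retry values are placeholders, not library defaults; codes 14 and 4 are gRPC UNAVAILABLE and DEADLINE_EXCEEDED, matching the retry codes used in this client's defaults):

    require "google/cloud/bigquery/data_transfer/v1"

    # Sketch: set a general call timeout, then give get_data_source its own
    # budget and an explicit retry policy using the keys listed above.
    client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new do |config|
      config.timeout = 10.0
      config.rpcs.get_data_source.timeout = 20.0
      config.rpcs.get_data_source.retry_policy = {
        initial_delay: 0.2, max_delay: 30.0, multiplier: 1.5, retry_codes: [14, 4]
      }
    end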
- # - class Rpcs - ## - # RPC-specific configuration for `get_data_source` - # @return [::Gapic::Config::Method] - # - attr_reader :get_data_source - ## - # RPC-specific configuration for `list_data_sources` - # @return [::Gapic::Config::Method] - # - attr_reader :list_data_sources - ## - # RPC-specific configuration for `create_transfer_config` - # @return [::Gapic::Config::Method] - # - attr_reader :create_transfer_config - ## - # RPC-specific configuration for `update_transfer_config` - # @return [::Gapic::Config::Method] - # - attr_reader :update_transfer_config - ## - # RPC-specific configuration for `delete_transfer_config` - # @return [::Gapic::Config::Method] - # - attr_reader :delete_transfer_config - ## - # RPC-specific configuration for `get_transfer_config` - # @return [::Gapic::Config::Method] - # - attr_reader :get_transfer_config - ## - # RPC-specific configuration for `list_transfer_configs` - # @return [::Gapic::Config::Method] - # - attr_reader :list_transfer_configs - ## - # RPC-specific configuration for `schedule_transfer_runs` - # @return [::Gapic::Config::Method] - # - attr_reader :schedule_transfer_runs - ## - # RPC-specific configuration for `start_manual_transfer_runs` - # @return [::Gapic::Config::Method] - # - attr_reader :start_manual_transfer_runs - ## - # RPC-specific configuration for `get_transfer_run` - # @return [::Gapic::Config::Method] - # - attr_reader :get_transfer_run - ## - # RPC-specific configuration for `delete_transfer_run` - # @return [::Gapic::Config::Method] - # - attr_reader :delete_transfer_run - ## - # RPC-specific configuration for `list_transfer_runs` - # @return [::Gapic::Config::Method] - # - attr_reader :list_transfer_runs - ## - # RPC-specific configuration for `list_transfer_logs` - # @return [::Gapic::Config::Method] - # - attr_reader :list_transfer_logs - ## - # RPC-specific configuration for `check_valid_creds` - # @return [::Gapic::Config::Method] - # - attr_reader :check_valid_creds - ## - # RPC-specific configuration for `enroll_data_sources` - # @return [::Gapic::Config::Method] - # - attr_reader :enroll_data_sources - ## - # RPC-specific configuration for `unenroll_data_sources` - # @return [::Gapic::Config::Method] - # - attr_reader :unenroll_data_sources - - # @private - def initialize parent_rpcs = nil - get_data_source_config = parent_rpcs.get_data_source if parent_rpcs.respond_to? :get_data_source - @get_data_source = ::Gapic::Config::Method.new get_data_source_config - list_data_sources_config = parent_rpcs.list_data_sources if parent_rpcs.respond_to? :list_data_sources - @list_data_sources = ::Gapic::Config::Method.new list_data_sources_config - create_transfer_config_config = parent_rpcs.create_transfer_config if parent_rpcs.respond_to? :create_transfer_config - @create_transfer_config = ::Gapic::Config::Method.new create_transfer_config_config - update_transfer_config_config = parent_rpcs.update_transfer_config if parent_rpcs.respond_to? :update_transfer_config - @update_transfer_config = ::Gapic::Config::Method.new update_transfer_config_config - delete_transfer_config_config = parent_rpcs.delete_transfer_config if parent_rpcs.respond_to? :delete_transfer_config - @delete_transfer_config = ::Gapic::Config::Method.new delete_transfer_config_config - get_transfer_config_config = parent_rpcs.get_transfer_config if parent_rpcs.respond_to? 
:get_transfer_config - @get_transfer_config = ::Gapic::Config::Method.new get_transfer_config_config - list_transfer_configs_config = parent_rpcs.list_transfer_configs if parent_rpcs.respond_to? :list_transfer_configs - @list_transfer_configs = ::Gapic::Config::Method.new list_transfer_configs_config - schedule_transfer_runs_config = parent_rpcs.schedule_transfer_runs if parent_rpcs.respond_to? :schedule_transfer_runs - @schedule_transfer_runs = ::Gapic::Config::Method.new schedule_transfer_runs_config - start_manual_transfer_runs_config = parent_rpcs.start_manual_transfer_runs if parent_rpcs.respond_to? :start_manual_transfer_runs - @start_manual_transfer_runs = ::Gapic::Config::Method.new start_manual_transfer_runs_config - get_transfer_run_config = parent_rpcs.get_transfer_run if parent_rpcs.respond_to? :get_transfer_run - @get_transfer_run = ::Gapic::Config::Method.new get_transfer_run_config - delete_transfer_run_config = parent_rpcs.delete_transfer_run if parent_rpcs.respond_to? :delete_transfer_run - @delete_transfer_run = ::Gapic::Config::Method.new delete_transfer_run_config - list_transfer_runs_config = parent_rpcs.list_transfer_runs if parent_rpcs.respond_to? :list_transfer_runs - @list_transfer_runs = ::Gapic::Config::Method.new list_transfer_runs_config - list_transfer_logs_config = parent_rpcs.list_transfer_logs if parent_rpcs.respond_to? :list_transfer_logs - @list_transfer_logs = ::Gapic::Config::Method.new list_transfer_logs_config - check_valid_creds_config = parent_rpcs.check_valid_creds if parent_rpcs.respond_to? :check_valid_creds - @check_valid_creds = ::Gapic::Config::Method.new check_valid_creds_config - enroll_data_sources_config = parent_rpcs.enroll_data_sources if parent_rpcs.respond_to? :enroll_data_sources - @enroll_data_sources = ::Gapic::Config::Method.new enroll_data_sources_config - unenroll_data_sources_config = parent_rpcs.unenroll_data_sources if parent_rpcs.respond_to? :unenroll_data_sources - @unenroll_data_sources = ::Gapic::Config::Method.new unenroll_data_sources_config - - yield self if block_given? - end - end - end - end - end - end - end - end - end -end diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/data_transfer/v1/data_transfer_service/credentials.rb b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/data_transfer/v1/data_transfer_service/credentials.rb deleted file mode 100644 index 4fe857bc5f97..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/data_transfer/v1/data_transfer_service/credentials.rb +++ /dev/null @@ -1,53 +0,0 @@ -# frozen_string_literal: true - -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Auto-generated by gapic-generator-ruby. DO NOT EDIT! - -require "googleauth" - -module Google - module Cloud - module Bigquery - module DataTransfer - module V1 - module DataTransferService - # Credentials for the DataTransferService API. 
- class Credentials < ::Google::Auth::Credentials - self.scope = [ - "https://www.googleapis.com/auth/cloud-platform" - ] - self.env_vars = [ - "DATA_TRANSFER_CREDENTIALS", - "DATA_TRANSFER_KEYFILE", - "GOOGLE_CLOUD_CREDENTIALS", - "GOOGLE_CLOUD_KEYFILE", - "GCLOUD_KEYFILE", - "DATA_TRANSFER_CREDENTIALS_JSON", - "DATA_TRANSFER_KEYFILE_JSON", - "GOOGLE_CLOUD_CREDENTIALS_JSON", - "GOOGLE_CLOUD_KEYFILE_JSON", - "GCLOUD_KEYFILE_JSON" - ] - self.paths = [ - "~/.config/google_cloud/application_default_credentials.json" - ] - end - end - end - end - end - end -end diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/data_transfer/v1/data_transfer_service/paths.rb b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/data_transfer/v1/data_transfer_service/paths.rb deleted file mode 100644 index 3083e79e4b05..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/data_transfer/v1/data_transfer_service/paths.rb +++ /dev/null @@ -1,193 +0,0 @@ -# frozen_string_literal: true - -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Auto-generated by gapic-generator-ruby. DO NOT EDIT! - - -module Google - module Cloud - module Bigquery - module DataTransfer - module V1 - module DataTransferService - # Path helper methods for the DataTransferService API. - module Paths - ## - # Create a fully-qualified DataSource resource string. - # - # @overload data_source_path(project:, data_source:) - # The resource will be in the following format: - # - # `projects/{project}/dataSources/{data_source}` - # - # @param project [String] - # @param data_source [String] - # - # @overload data_source_path(project:, location:, data_source:) - # The resource will be in the following format: - # - # `projects/{project}/locations/{location}/dataSources/{data_source}` - # - # @param project [String] - # @param location [String] - # @param data_source [String] - # - # @return [::String] - def data_source_path **args - resources = { - "data_source:project" => (proc do |project:, data_source:| - raise ::ArgumentError, "project cannot contain /" if project.to_s.include? "/" - - "projects/#{project}/dataSources/#{data_source}" - end), - "data_source:location:project" => (proc do |project:, location:, data_source:| - raise ::ArgumentError, "project cannot contain /" if project.to_s.include? "/" - raise ::ArgumentError, "location cannot contain /" if location.to_s.include? "/" - - "projects/#{project}/locations/#{location}/dataSources/#{data_source}" - end) - } - - resource = resources[args.keys.sort.join(":")] - raise ::ArgumentError, "no resource found for values #{args.keys}" if resource.nil? - resource.call(**args) - end - - ## - # Create a fully-qualified Location resource string. 
- # - # The resource will be in the following format: - # - # `projects/{project}/locations/{location}` - # - # @param project [String] - # @param location [String] - # - # @return [::String] - def location_path project:, location: - raise ::ArgumentError, "project cannot contain /" if project.to_s.include? "/" - - "projects/#{project}/locations/#{location}" - end - - ## - # Create a fully-qualified Project resource string. - # - # The resource will be in the following format: - # - # `projects/{project}` - # - # @param project [String] - # - # @return [::String] - def project_path project: - "projects/#{project}" - end - - ## - # Create a fully-qualified Run resource string. - # - # @overload run_path(project:, transfer_config:, run:) - # The resource will be in the following format: - # - # `projects/{project}/transferConfigs/{transfer_config}/runs/{run}` - # - # @param project [String] - # @param transfer_config [String] - # @param run [String] - # - # @overload run_path(project:, location:, transfer_config:, run:) - # The resource will be in the following format: - # - # `projects/{project}/locations/{location}/transferConfigs/{transfer_config}/runs/{run}` - # - # @param project [String] - # @param location [String] - # @param transfer_config [String] - # @param run [String] - # - # @return [::String] - def run_path **args - resources = { - "project:run:transfer_config" => (proc do |project:, transfer_config:, run:| - raise ::ArgumentError, "project cannot contain /" if project.to_s.include? "/" - raise ::ArgumentError, "transfer_config cannot contain /" if transfer_config.to_s.include? "/" - - "projects/#{project}/transferConfigs/#{transfer_config}/runs/#{run}" - end), - "location:project:run:transfer_config" => (proc do |project:, location:, transfer_config:, run:| - raise ::ArgumentError, "project cannot contain /" if project.to_s.include? "/" - raise ::ArgumentError, "location cannot contain /" if location.to_s.include? "/" - raise ::ArgumentError, "transfer_config cannot contain /" if transfer_config.to_s.include? "/" - - "projects/#{project}/locations/#{location}/transferConfigs/#{transfer_config}/runs/#{run}" - end) - } - - resource = resources[args.keys.sort.join(":")] - raise ::ArgumentError, "no resource found for values #{args.keys}" if resource.nil? - resource.call(**args) - end - - ## - # Create a fully-qualified TransferConfig resource string. - # - # @overload transfer_config_path(project:, transfer_config:) - # The resource will be in the following format: - # - # `projects/{project}/transferConfigs/{transfer_config}` - # - # @param project [String] - # @param transfer_config [String] - # - # @overload transfer_config_path(project:, location:, transfer_config:) - # The resource will be in the following format: - # - # `projects/{project}/locations/{location}/transferConfigs/{transfer_config}` - # - # @param project [String] - # @param location [String] - # @param transfer_config [String] - # - # @return [::String] - def transfer_config_path **args - resources = { - "project:transfer_config" => (proc do |project:, transfer_config:| - raise ::ArgumentError, "project cannot contain /" if project.to_s.include? "/" - - "projects/#{project}/transferConfigs/#{transfer_config}" - end), - "location:project:transfer_config" => (proc do |project:, location:, transfer_config:| - raise ::ArgumentError, "project cannot contain /" if project.to_s.include? "/" - raise ::ArgumentError, "location cannot contain /" if location.to_s.include? 
"/" - - "projects/#{project}/locations/#{location}/transferConfigs/#{transfer_config}" - end) - } - - resource = resources[args.keys.sort.join(":")] - raise ::ArgumentError, "no resource found for values #{args.keys}" if resource.nil? - resource.call(**args) - end - - extend self - end - end - end - end - end - end -end diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/data_transfer/v1/data_transfer_service/rest.rb b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/data_transfer/v1/data_transfer_service/rest.rb deleted file mode 100644 index 5a33667e70ce..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/data_transfer/v1/data_transfer_service/rest.rb +++ /dev/null @@ -1,55 +0,0 @@ -# frozen_string_literal: true - -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Auto-generated by gapic-generator-ruby. DO NOT EDIT! - -require "gapic/rest" -require "gapic/config" -require "gapic/config/method" - -require "google/cloud/bigquery/data_transfer/v1/version" -require "google/cloud/bigquery/data_transfer/v1/bindings_override" - -require "google/cloud/bigquery/data_transfer/v1/data_transfer_service/credentials" -require "google/cloud/bigquery/data_transfer/v1/data_transfer_service/paths" -require "google/cloud/bigquery/data_transfer/v1/data_transfer_service/rest/client" - -module Google - module Cloud - module Bigquery - module DataTransfer - module V1 - ## - # This API allows users to manage their data transfers into BigQuery. - # - # To load this service and instantiate a REST client: - # - # require "google/cloud/bigquery/data_transfer/v1/data_transfer_service/rest" - # client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::Client.new - # - module DataTransferService - # Client for the REST transport - module Rest - end - end - end - end - end - end -end - -helper_path = ::File.join __dir__, "rest", "helpers.rb" -require "google/cloud/bigquery/data_transfer/v1/data_transfer_service/rest/helpers" if ::File.file? helper_path diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/data_transfer/v1/data_transfer_service/rest/client.rb b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/data_transfer/v1/data_transfer_service/rest/client.rb deleted file mode 100644 index dd029392dfa6..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/data_transfer/v1/data_transfer_service/rest/client.rb +++ /dev/null @@ -1,1995 +0,0 @@ -# frozen_string_literal: true - -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Auto-generated by gapic-generator-ruby. DO NOT EDIT! - -require "google/cloud/errors" -require "google/cloud/bigquery/datatransfer/v1/datatransfer_pb" -require "google/cloud/bigquery/data_transfer/v1/data_transfer_service/rest/service_stub" -require "google/cloud/location/rest" - -module Google - module Cloud - module Bigquery - module DataTransfer - module V1 - module DataTransferService - module Rest - ## - # REST client for the DataTransferService service. - # - # This API allows users to manage their data transfers into BigQuery. - # - class Client - # @private - API_VERSION = "" - - # @private - DEFAULT_ENDPOINT_TEMPLATE = "bigquerydatatransfer.$UNIVERSE_DOMAIN$" - - include Paths - - # @private - attr_reader :data_transfer_service_stub - - ## - # Configure the DataTransferService Client class. - # - # See {::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::Client::Configuration} - # for a description of the configuration fields. - # - # @example - # - # # Modify the configuration for all DataTransferService clients - # ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::Client.configure do |config| - # config.timeout = 10.0 - # end - # - # @yield [config] Configure the Client client. - # @yieldparam config [Client::Configuration] - # - # @return [Client::Configuration] - # - def self.configure - @configure ||= begin - namespace = ["Google", "Cloud", "Bigquery", "DataTransfer", "V1"] - parent_config = while namespace.any? - parent_name = namespace.join "::" - parent_const = const_get parent_name - break parent_const.configure if parent_const.respond_to? 
:configure - namespace.pop - end - default_config = Client::Configuration.new parent_config - - default_config.rpcs.get_data_source.timeout = 20.0 - default_config.rpcs.get_data_source.retry_policy = { - initial_delay: 0.1, max_delay: 60.0, multiplier: 1.3, retry_codes: [14, 4] - } - - default_config.rpcs.list_data_sources.timeout = 20.0 - default_config.rpcs.list_data_sources.retry_policy = { - initial_delay: 0.1, max_delay: 60.0, multiplier: 1.3, retry_codes: [14, 4] - } - - default_config.rpcs.create_transfer_config.timeout = 30.0 - - default_config.rpcs.update_transfer_config.timeout = 30.0 - - default_config.rpcs.delete_transfer_config.timeout = 20.0 - default_config.rpcs.delete_transfer_config.retry_policy = { - initial_delay: 0.1, max_delay: 60.0, multiplier: 1.3, retry_codes: [14, 4] - } - - default_config.rpcs.get_transfer_config.timeout = 20.0 - default_config.rpcs.get_transfer_config.retry_policy = { - initial_delay: 0.1, max_delay: 60.0, multiplier: 1.3, retry_codes: [14, 4] - } - - default_config.rpcs.list_transfer_configs.timeout = 20.0 - default_config.rpcs.list_transfer_configs.retry_policy = { - initial_delay: 0.1, max_delay: 60.0, multiplier: 1.3, retry_codes: [14, 4] - } - - default_config.rpcs.schedule_transfer_runs.timeout = 30.0 - - default_config.rpcs.get_transfer_run.timeout = 20.0 - default_config.rpcs.get_transfer_run.retry_policy = { - initial_delay: 0.1, max_delay: 60.0, multiplier: 1.3, retry_codes: [14, 4] - } - - default_config.rpcs.delete_transfer_run.timeout = 20.0 - default_config.rpcs.delete_transfer_run.retry_policy = { - initial_delay: 0.1, max_delay: 60.0, multiplier: 1.3, retry_codes: [14, 4] - } - - default_config.rpcs.list_transfer_runs.timeout = 20.0 - default_config.rpcs.list_transfer_runs.retry_policy = { - initial_delay: 0.1, max_delay: 60.0, multiplier: 1.3, retry_codes: [14, 4] - } - - default_config.rpcs.list_transfer_logs.timeout = 20.0 - default_config.rpcs.list_transfer_logs.retry_policy = { - initial_delay: 0.1, max_delay: 60.0, multiplier: 1.3, retry_codes: [14, 4] - } - - default_config.rpcs.check_valid_creds.timeout = 20.0 - default_config.rpcs.check_valid_creds.retry_policy = { - initial_delay: 0.1, max_delay: 60.0, multiplier: 1.3, retry_codes: [14, 4] - } - - default_config - end - yield @configure if block_given? - @configure - end - - ## - # Configure the DataTransferService Client instance. - # - # The configuration is set to the derived mode, meaning that values can be changed, - # but structural changes (adding new fields, etc.) are not allowed. Structural changes - # should be made on {Client.configure}. - # - # See {::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::Client::Configuration} - # for a description of the configuration fields. - # - # @yield [config] Configure the Client client. - # @yieldparam config [Client::Configuration] - # - # @return [Client::Configuration] - # - def configure - yield @config if block_given? - @config - end - - ## - # The effective universe domain - # - # @return [String] - # - def universe_domain - @data_transfer_service_stub.universe_domain - end - - ## - # Create a new DataTransferService REST client object. 
- # - # @example - # - # # Create a client using the default configuration - # client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::Client.new - # - # # Create a client using a custom configuration - # client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::Client.new do |config| - # config.timeout = 10.0 - # end - # - # @yield [config] Configure the DataTransferService client. - # @yieldparam config [Client::Configuration] - # - def initialize - # Create the configuration object - @config = Configuration.new Client.configure - - # Yield the configuration if needed - yield @config if block_given? - - # Create credentials - credentials = @config.credentials - # Use self-signed JWT if the endpoint is unchanged from default, - # but only if the default endpoint does not have a region prefix. - enable_self_signed_jwt = @config.endpoint.nil? || - (@config.endpoint == Configuration::DEFAULT_ENDPOINT && - !@config.endpoint.split(".").first.include?("-")) - credentials ||= Credentials.default scope: @config.scope, - enable_self_signed_jwt: enable_self_signed_jwt - if credentials.is_a?(::String) || credentials.is_a?(::Hash) - credentials = Credentials.new credentials, scope: @config.scope - end - - @quota_project_id = @config.quota_project - @quota_project_id ||= credentials.quota_project_id if credentials.respond_to? :quota_project_id - - @data_transfer_service_stub = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::ServiceStub.new( - endpoint: @config.endpoint, - endpoint_template: DEFAULT_ENDPOINT_TEMPLATE, - universe_domain: @config.universe_domain, - credentials: credentials - ) - - @location_client = Google::Cloud::Location::Locations::Rest::Client.new do |config| - config.credentials = credentials - config.quota_project = @quota_project_id - config.endpoint = @data_transfer_service_stub.endpoint - config.universe_domain = @data_transfer_service_stub.universe_domain - config.bindings_override = @config.bindings_override - end - end - - ## - # Get the associated client for mix-in of the Locations. - # - # @return [Google::Cloud::Location::Locations::Rest::Client] - # - attr_reader :location_client - - # Service calls - - ## - # Retrieves a supported data source and returns its settings. - # - # @overload get_data_source(request, options = nil) - # Pass arguments to `get_data_source` via a request object, either of type - # {::Google::Cloud::Bigquery::DataTransfer::V1::GetDataSourceRequest} or an equivalent Hash. - # - # @param request [::Google::Cloud::Bigquery::DataTransfer::V1::GetDataSourceRequest, ::Hash] - # A request object representing the call parameters. Required. To specify no - # parameters, or to keep all the default parameter values, pass an empty Hash. - # @param options [::Gapic::CallOptions, ::Hash] - # Overrides the default settings for this call, e.g, timeout, retries etc. Optional. - # - # @overload get_data_source(name: nil) - # Pass arguments to `get_data_source` via keyword arguments. Note that at - # least one keyword argument is required. To specify no parameters, or to keep all - # the default parameter values, pass an empty Hash as a request object (see above). - # - # @param name [::String] - # Required. 
The field will contain name of the resource requested, for - # example: `projects/{project_id}/dataSources/{data_source_id}` or - # `projects/{project_id}/locations/{location_id}/dataSources/{data_source_id}` - # @yield [result, operation] Access the result along with the TransportOperation object - # @yieldparam result [::Google::Cloud::Bigquery::DataTransfer::V1::DataSource] - # @yieldparam operation [::Gapic::Rest::TransportOperation] - # - # @return [::Google::Cloud::Bigquery::DataTransfer::V1::DataSource] - # - # @raise [::Google::Cloud::Error] if the REST call is aborted. - # - # @example Basic example - # require "google/cloud/bigquery/data_transfer/v1" - # - # # Create a client object. The client can be reused for multiple calls. - # client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::Client.new - # - # # Create a request. To set request fields, pass in keyword arguments. - # request = Google::Cloud::Bigquery::DataTransfer::V1::GetDataSourceRequest.new - # - # # Call the get_data_source method. - # result = client.get_data_source request - # - # # The returned object is of type Google::Cloud::Bigquery::DataTransfer::V1::DataSource. - # p result - # - def get_data_source request, options = nil - raise ::ArgumentError, "request must be provided" if request.nil? - - request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigquery::DataTransfer::V1::GetDataSourceRequest - - # Converts hash and nil to an options object - options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h - - # Customize the options with defaults - call_metadata = @config.rpcs.get_data_source.metadata.to_h - - # Set x-goog-api-client, x-goog-user-project and x-goog-api-version headers - call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \ - lib_name: @config.lib_name, lib_version: @config.lib_version, - gapic_version: ::Google::Cloud::Bigquery::DataTransfer::V1::VERSION, - transports_version_send: [:rest] - - call_metadata[:"x-goog-api-version"] = API_VERSION unless API_VERSION.empty? - call_metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id - - options.apply_defaults timeout: @config.rpcs.get_data_source.timeout, - metadata: call_metadata, - retry_policy: @config.rpcs.get_data_source.retry_policy - - options.apply_defaults timeout: @config.timeout, - metadata: @config.metadata, - retry_policy: @config.retry_policy - - @data_transfer_service_stub.get_data_source request, options do |result, operation| - yield result, operation if block_given? - return result - end - rescue ::Gapic::Rest::Error => e - raise ::Google::Cloud::Error.from_error(e) - end - - ## - # Lists supported data sources and returns their settings. - # - # @overload list_data_sources(request, options = nil) - # Pass arguments to `list_data_sources` via a request object, either of type - # {::Google::Cloud::Bigquery::DataTransfer::V1::ListDataSourcesRequest} or an equivalent Hash. - # - # @param request [::Google::Cloud::Bigquery::DataTransfer::V1::ListDataSourcesRequest, ::Hash] - # A request object representing the call parameters. Required. To specify no - # parameters, or to keep all the default parameter values, pass an empty Hash. - # @param options [::Gapic::CallOptions, ::Hash] - # Overrides the default settings for this call, e.g, timeout, retries etc. Optional. - # - # @overload list_data_sources(parent: nil, page_token: nil, page_size: nil) - # Pass arguments to `list_data_sources` via keyword arguments. 
Note that at - # least one keyword argument is required. To specify no parameters, or to keep all - # the default parameter values, pass an empty Hash as a request object (see above). - # - # @param parent [::String] - # Required. The BigQuery project id for which data sources should be - # returned. Must be in the form: `projects/{project_id}` or - # `projects/{project_id}/locations/{location_id}` - # @param page_token [::String] - # Pagination token, which can be used to request a specific page - # of `ListDataSourcesRequest` list results. For multiple-page - # results, `ListDataSourcesResponse` outputs - # a `next_page` token, which can be used as the - # `page_token` value to request the next page of list results. - # @param page_size [::Integer] - # Page size. The default page size is the maximum value of 1000 results. - # @yield [result, operation] Access the result along with the TransportOperation object - # @yieldparam result [::Gapic::Rest::PagedEnumerable<::Google::Cloud::Bigquery::DataTransfer::V1::DataSource>] - # @yieldparam operation [::Gapic::Rest::TransportOperation] - # - # @return [::Gapic::Rest::PagedEnumerable<::Google::Cloud::Bigquery::DataTransfer::V1::DataSource>] - # - # @raise [::Google::Cloud::Error] if the REST call is aborted. - # - # @example Basic example - # require "google/cloud/bigquery/data_transfer/v1" - # - # # Create a client object. The client can be reused for multiple calls. - # client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::Client.new - # - # # Create a request. To set request fields, pass in keyword arguments. - # request = Google::Cloud::Bigquery::DataTransfer::V1::ListDataSourcesRequest.new - # - # # Call the list_data_sources method. - # result = client.list_data_sources request - # - # # The returned object is of type Gapic::PagedEnumerable. You can iterate - # # over elements, and API calls will be issued to fetch pages as needed. - # result.each do |item| - # # Each element is of type ::Google::Cloud::Bigquery::DataTransfer::V1::DataSource. - # p item - # end - # - def list_data_sources request, options = nil - raise ::ArgumentError, "request must be provided" if request.nil? - - request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigquery::DataTransfer::V1::ListDataSourcesRequest - - # Converts hash and nil to an options object - options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h - - # Customize the options with defaults - call_metadata = @config.rpcs.list_data_sources.metadata.to_h - - # Set x-goog-api-client, x-goog-user-project and x-goog-api-version headers - call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \ - lib_name: @config.lib_name, lib_version: @config.lib_version, - gapic_version: ::Google::Cloud::Bigquery::DataTransfer::V1::VERSION, - transports_version_send: [:rest] - - call_metadata[:"x-goog-api-version"] = API_VERSION unless API_VERSION.empty? 
- call_metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id - - options.apply_defaults timeout: @config.rpcs.list_data_sources.timeout, - metadata: call_metadata, - retry_policy: @config.rpcs.list_data_sources.retry_policy - - options.apply_defaults timeout: @config.timeout, - metadata: @config.metadata, - retry_policy: @config.retry_policy - - @data_transfer_service_stub.list_data_sources request, options do |result, operation| - result = ::Gapic::Rest::PagedEnumerable.new @data_transfer_service_stub, :list_data_sources, "data_sources", request, result, options - yield result, operation if block_given? - return result - end - rescue ::Gapic::Rest::Error => e - raise ::Google::Cloud::Error.from_error(e) - end - - ## - # Creates a new data transfer configuration. - # - # @overload create_transfer_config(request, options = nil) - # Pass arguments to `create_transfer_config` via a request object, either of type - # {::Google::Cloud::Bigquery::DataTransfer::V1::CreateTransferConfigRequest} or an equivalent Hash. - # - # @param request [::Google::Cloud::Bigquery::DataTransfer::V1::CreateTransferConfigRequest, ::Hash] - # A request object representing the call parameters. Required. To specify no - # parameters, or to keep all the default parameter values, pass an empty Hash. - # @param options [::Gapic::CallOptions, ::Hash] - # Overrides the default settings for this call, e.g, timeout, retries etc. Optional. - # - # @overload create_transfer_config(parent: nil, transfer_config: nil, authorization_code: nil, version_info: nil, service_account_name: nil) - # Pass arguments to `create_transfer_config` via keyword arguments. Note that at - # least one keyword argument is required. To specify no parameters, or to keep all - # the default parameter values, pass an empty Hash as a request object (see above). - # - # @param parent [::String] - # Required. The BigQuery project id where the transfer configuration should - # be created. Must be in the format - # projects/\\{project_id}/locations/\\{location_id} or projects/\\{project_id}. If - # specified location and location of the destination bigquery dataset do not - # match - the request will fail. - # @param transfer_config [::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig, ::Hash] - # Required. Data transfer configuration to create. - # @param authorization_code [::String] - # Deprecated: Authorization code was required when - # `transferConfig.dataSourceId` is 'youtube_channel' but it is no longer used - # in any data sources. Use `version_info` instead. - # - # Optional OAuth2 authorization code to use with this transfer configuration. - # This is required only if `transferConfig.dataSourceId` is 'youtube_channel' - # and new credentials are needed, as indicated by `CheckValidCreds`. In order - # to obtain authorization_code, make a request to the following URL: - #
-                #     https://bigquery.cloud.google.com/datatransfer/oauthz/auth?redirect_uri=urn:ietf:wg:oauth:2.0:oob&response_type=authorization_code&client_id=client_id&scope=data_source_scopes
- # * The client_id is the OAuth client_id of the data source as - # returned by ListDataSources method. - # * data_source_scopes are the scopes returned by ListDataSources - # method. - # - # Note that this should not be set when `service_account_name` is used to - # create the transfer config. - # @param version_info [::String] - # Optional version info. This parameter replaces `authorization_code` which - # is no longer used in any data sources. This is required only if - # `transferConfig.dataSourceId` is 'youtube_channel' *or* new credentials - # are needed, as indicated by `CheckValidCreds`. In order to obtain version - # info, make a request to the following URL: - #
-                #     https://bigquery.cloud.google.com/datatransfer/oauthz/auth?redirect_uri=urn:ietf:wg:oauth:2.0:oob&response_type=version_info&client_id=client_id&scope=data_source_scopes
- # * The client_id is the OAuth client_id of the data source as - # returned by ListDataSources method. - # * data_source_scopes are the scopes returned by ListDataSources - # method. - # - # Note that this should not be set when `service_account_name` is used to - # create the transfer config. - # @param service_account_name [::String] - # Optional service account email. If this field is set, the transfer config - # will be created with this service account's credentials. It requires that - # the requesting user calling this API has permissions to act as this service - # account. - # - # Note that not all data sources support service account credentials when - # creating a transfer config. For the latest list of data sources, read about - # [using service - # accounts](https://cloud.google.com/bigquery-transfer/docs/use-service-accounts). - # @yield [result, operation] Access the result along with the TransportOperation object - # @yieldparam result [::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig] - # @yieldparam operation [::Gapic::Rest::TransportOperation] - # - # @return [::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig] - # - # @raise [::Google::Cloud::Error] if the REST call is aborted. - # - # @example Basic example - # require "google/cloud/bigquery/data_transfer/v1" - # - # # Create a client object. The client can be reused for multiple calls. - # client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::Client.new - # - # # Create a request. To set request fields, pass in keyword arguments. - # request = Google::Cloud::Bigquery::DataTransfer::V1::CreateTransferConfigRequest.new - # - # # Call the create_transfer_config method. - # result = client.create_transfer_config request - # - # # The returned object is of type Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig. - # p result - # - def create_transfer_config request, options = nil - raise ::ArgumentError, "request must be provided" if request.nil? - - request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigquery::DataTransfer::V1::CreateTransferConfigRequest - - # Converts hash and nil to an options object - options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h - - # Customize the options with defaults - call_metadata = @config.rpcs.create_transfer_config.metadata.to_h - - # Set x-goog-api-client, x-goog-user-project and x-goog-api-version headers - call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \ - lib_name: @config.lib_name, lib_version: @config.lib_version, - gapic_version: ::Google::Cloud::Bigquery::DataTransfer::V1::VERSION, - transports_version_send: [:rest] - - call_metadata[:"x-goog-api-version"] = API_VERSION unless API_VERSION.empty? - call_metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id - - options.apply_defaults timeout: @config.rpcs.create_transfer_config.timeout, - metadata: call_metadata, - retry_policy: @config.rpcs.create_transfer_config.retry_policy - - options.apply_defaults timeout: @config.timeout, - metadata: @config.metadata, - retry_policy: @config.retry_policy - - @data_transfer_service_stub.create_transfer_config request, options do |result, operation| - yield result, operation if block_given? - return result - end - rescue ::Gapic::Rest::Error => e - raise ::Google::Cloud::Error.from_error(e) - end - - ## - # Updates a data transfer configuration. - # All fields must be set, even if they are not updated. 
-                #
-                # @overload update_transfer_config(request, options = nil)
-                #   Pass arguments to `update_transfer_config` via a request object, either of type
-                #   {::Google::Cloud::Bigquery::DataTransfer::V1::UpdateTransferConfigRequest} or an equivalent Hash.
-                #
-                #   @param request [::Google::Cloud::Bigquery::DataTransfer::V1::UpdateTransferConfigRequest, ::Hash]
-                #     A request object representing the call parameters. Required. To specify no
-                #     parameters, or to keep all the default parameter values, pass an empty Hash.
-                #   @param options [::Gapic::CallOptions, ::Hash]
-                #     Overrides the default settings for this call, e.g., timeout, retries, etc. Optional.
-                #
-                # @overload update_transfer_config(transfer_config: nil, authorization_code: nil, update_mask: nil, version_info: nil, service_account_name: nil)
-                #   Pass arguments to `update_transfer_config` via keyword arguments. Note that at
-                #   least one keyword argument is required. To specify no parameters, or to keep all
-                #   the default parameter values, pass an empty Hash as a request object (see above).
-                #
-                #   @param transfer_config [::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig, ::Hash]
-                #     Required. Data transfer configuration to update.
-                #   @param authorization_code [::String]
-                #     Deprecated: Authorization code was required when
-                #     `transferConfig.dataSourceId` is 'youtube_channel' but it is no longer used
-                #     in any data sources. Use `version_info` instead.
-                #
-                #     Optional OAuth2 authorization code to use with this transfer configuration.
-                #     This is required only if `transferConfig.dataSourceId` is 'youtube_channel'
-                #     and new credentials are needed, as indicated by `CheckValidCreds`. In order
-                #     to obtain authorization_code, make a request to the following URL:
-                #
-                #     https://bigquery.cloud.google.com/datatransfer/oauthz/auth?redirect_uri=urn:ietf:wg:oauth:2.0:oob&response_type=authorization_code&client_id=client_id&scope=data_source_scopes
-                #     
-                #     * The client_id is the OAuth client_id of the data source as
-                #       returned by ListDataSources method.
-                #     * data_source_scopes are the scopes returned by ListDataSources
-                #       method.
-                #
-                #     Note that this should not be set when `service_account_name` is used to
-                #     update the transfer config.
-                #   @param update_mask [::Google::Protobuf::FieldMask, ::Hash]
-                #     Required. The list of fields to be updated in this request.
-                #   @param version_info [::String]
-                #     Optional version info. This parameter replaces `authorization_code` which
-                #     is no longer used in any data sources. This is required only if
-                #     `transferConfig.dataSourceId` is 'youtube_channel' *or* new credentials
-                #     are needed, as indicated by `CheckValidCreds`. In order to obtain version
-                #     info, make a request to the following URL:
-                #
-                #     https://bigquery.cloud.google.com/datatransfer/oauthz/auth?redirect_uri=urn:ietf:wg:oauth:2.0:oob&response_type=version_info&client_id=client_id&scope=data_source_scopes
-                #     
-                #     * The client_id is the OAuth client_id of the data source as
-                #       returned by ListDataSources method.
-                #     * data_source_scopes are the scopes returned by ListDataSources
-                #       method.
-                #
-                #     Note that this should not be set when `service_account_name` is used to
-                #     update the transfer config.
-                #   @param service_account_name [::String]
-                #     Optional service account email. If this field is set, the transfer config
-                #     will be updated with this service account's credentials. It requires that
-                #     the requesting user calling this API has permissions to act as this service
-                #     account.
-                #
-                #     Note that not all data sources support service account credentials when
-                #     updating a transfer config. For the latest list of data sources, read about
-                #     [using service
-                #     accounts](https://cloud.google.com/bigquery-transfer/docs/use-service-accounts).
-                # @yield [result, operation] Access the result along with the TransportOperation object
-                # @yieldparam result [::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig]
-                # @yieldparam operation [::Gapic::Rest::TransportOperation]
-                #
-                # @return [::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig]
-                #
-                # @raise [::Google::Cloud::Error] if the REST call is aborted.
-                #
-                # @example Basic example
-                #   require "google/cloud/bigquery/data_transfer/v1"
-                #
-                #   # Create a client object. The client can be reused for multiple calls.
-                #   client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::Client.new
-                #
-                #   # Create a request. To set request fields, pass in keyword arguments.
-                #   request = Google::Cloud::Bigquery::DataTransfer::V1::UpdateTransferConfigRequest.new
-                #
-                #   # Call the update_transfer_config method.
-                #   result = client.update_transfer_config request
-                #
-                #   # The returned object is of type Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig.
-                #   p result
-                #
-                def update_transfer_config request, options = nil
-                  raise ::ArgumentError, "request must be provided" if request.nil?
-
-                  request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigquery::DataTransfer::V1::UpdateTransferConfigRequest
-
-                  # Converts hash and nil to an options object
-                  options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h
-
-                  # Customize the options with defaults
-                  call_metadata = @config.rpcs.update_transfer_config.metadata.to_h
-
-                  # Set x-goog-api-client, x-goog-user-project and x-goog-api-version headers
-                  call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
-                    lib_name: @config.lib_name, lib_version: @config.lib_version,
-                    gapic_version: ::Google::Cloud::Bigquery::DataTransfer::V1::VERSION,
-                    transports_version_send: [:rest]
-
-                  call_metadata[:"x-goog-api-version"] = API_VERSION unless API_VERSION.empty?
-                  call_metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id
-
-                  options.apply_defaults timeout: @config.rpcs.update_transfer_config.timeout,
-                                         metadata: call_metadata,
-                                         retry_policy: @config.rpcs.update_transfer_config.retry_policy
-
-                  options.apply_defaults timeout: @config.timeout,
-                                         metadata: @config.metadata,
-                                         retry_policy: @config.retry_policy
-
-                  @data_transfer_service_stub.update_transfer_config request, options do |result, operation|
-                    yield result, operation if block_given?
-                    return result
-                  end
-                rescue ::Gapic::Rest::Error => e
-                  raise ::Google::Cloud::Error.from_error(e)
-                end
-
-                ##
-                # Deletes a data transfer configuration, including any associated transfer
-                # runs and logs.
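A minimal sketch of the delete call summarized above, assuming a hypothetical config name; note that the configuration's runs and logs are removed along with it:

require "google/cloud/bigquery/data_transfer/v1"

client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::Client.new

# Returns Google::Protobuf::Empty on success; the name below is a placeholder.
client.delete_transfer_config name: "projects/my-project/transferConfigs/my-config"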
- # - # @overload delete_transfer_config(request, options = nil) - # Pass arguments to `delete_transfer_config` via a request object, either of type - # {::Google::Cloud::Bigquery::DataTransfer::V1::DeleteTransferConfigRequest} or an equivalent Hash. - # - # @param request [::Google::Cloud::Bigquery::DataTransfer::V1::DeleteTransferConfigRequest, ::Hash] - # A request object representing the call parameters. Required. To specify no - # parameters, or to keep all the default parameter values, pass an empty Hash. - # @param options [::Gapic::CallOptions, ::Hash] - # Overrides the default settings for this call, e.g, timeout, retries etc. Optional. - # - # @overload delete_transfer_config(name: nil) - # Pass arguments to `delete_transfer_config` via keyword arguments. Note that at - # least one keyword argument is required. To specify no parameters, or to keep all - # the default parameter values, pass an empty Hash as a request object (see above). - # - # @param name [::String] - # Required. The field will contain name of the resource requested, for - # example: `projects/{project_id}/transferConfigs/{config_id}` or - # `projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}` - # @yield [result, operation] Access the result along with the TransportOperation object - # @yieldparam result [::Google::Protobuf::Empty] - # @yieldparam operation [::Gapic::Rest::TransportOperation] - # - # @return [::Google::Protobuf::Empty] - # - # @raise [::Google::Cloud::Error] if the REST call is aborted. - # - # @example Basic example - # require "google/cloud/bigquery/data_transfer/v1" - # - # # Create a client object. The client can be reused for multiple calls. - # client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::Client.new - # - # # Create a request. To set request fields, pass in keyword arguments. - # request = Google::Cloud::Bigquery::DataTransfer::V1::DeleteTransferConfigRequest.new - # - # # Call the delete_transfer_config method. - # result = client.delete_transfer_config request - # - # # The returned object is of type Google::Protobuf::Empty. - # p result - # - def delete_transfer_config request, options = nil - raise ::ArgumentError, "request must be provided" if request.nil? - - request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigquery::DataTransfer::V1::DeleteTransferConfigRequest - - # Converts hash and nil to an options object - options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h - - # Customize the options with defaults - call_metadata = @config.rpcs.delete_transfer_config.metadata.to_h - - # Set x-goog-api-client, x-goog-user-project and x-goog-api-version headers - call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \ - lib_name: @config.lib_name, lib_version: @config.lib_version, - gapic_version: ::Google::Cloud::Bigquery::DataTransfer::V1::VERSION, - transports_version_send: [:rest] - - call_metadata[:"x-goog-api-version"] = API_VERSION unless API_VERSION.empty? - call_metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id - - options.apply_defaults timeout: @config.rpcs.delete_transfer_config.timeout, - metadata: call_metadata, - retry_policy: @config.rpcs.delete_transfer_config.retry_policy - - options.apply_defaults timeout: @config.timeout, - metadata: @config.metadata, - retry_policy: @config.retry_policy - - @data_transfer_service_stub.delete_transfer_config request, options do |result, operation| - yield result, operation if block_given? 
- return result - end - rescue ::Gapic::Rest::Error => e - raise ::Google::Cloud::Error.from_error(e) - end - - ## - # Returns information about a data transfer config. - # - # @overload get_transfer_config(request, options = nil) - # Pass arguments to `get_transfer_config` via a request object, either of type - # {::Google::Cloud::Bigquery::DataTransfer::V1::GetTransferConfigRequest} or an equivalent Hash. - # - # @param request [::Google::Cloud::Bigquery::DataTransfer::V1::GetTransferConfigRequest, ::Hash] - # A request object representing the call parameters. Required. To specify no - # parameters, or to keep all the default parameter values, pass an empty Hash. - # @param options [::Gapic::CallOptions, ::Hash] - # Overrides the default settings for this call, e.g, timeout, retries etc. Optional. - # - # @overload get_transfer_config(name: nil) - # Pass arguments to `get_transfer_config` via keyword arguments. Note that at - # least one keyword argument is required. To specify no parameters, or to keep all - # the default parameter values, pass an empty Hash as a request object (see above). - # - # @param name [::String] - # Required. The field will contain name of the resource requested, for - # example: `projects/{project_id}/transferConfigs/{config_id}` or - # `projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}` - # @yield [result, operation] Access the result along with the TransportOperation object - # @yieldparam result [::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig] - # @yieldparam operation [::Gapic::Rest::TransportOperation] - # - # @return [::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig] - # - # @raise [::Google::Cloud::Error] if the REST call is aborted. - # - # @example Basic example - # require "google/cloud/bigquery/data_transfer/v1" - # - # # Create a client object. The client can be reused for multiple calls. - # client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::Client.new - # - # # Create a request. To set request fields, pass in keyword arguments. - # request = Google::Cloud::Bigquery::DataTransfer::V1::GetTransferConfigRequest.new - # - # # Call the get_transfer_config method. - # result = client.get_transfer_config request - # - # # The returned object is of type Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig. - # p result - # - def get_transfer_config request, options = nil - raise ::ArgumentError, "request must be provided" if request.nil? - - request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigquery::DataTransfer::V1::GetTransferConfigRequest - - # Converts hash and nil to an options object - options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h - - # Customize the options with defaults - call_metadata = @config.rpcs.get_transfer_config.metadata.to_h - - # Set x-goog-api-client, x-goog-user-project and x-goog-api-version headers - call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \ - lib_name: @config.lib_name, lib_version: @config.lib_version, - gapic_version: ::Google::Cloud::Bigquery::DataTransfer::V1::VERSION, - transports_version_send: [:rest] - - call_metadata[:"x-goog-api-version"] = API_VERSION unless API_VERSION.empty? 
- call_metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id - - options.apply_defaults timeout: @config.rpcs.get_transfer_config.timeout, - metadata: call_metadata, - retry_policy: @config.rpcs.get_transfer_config.retry_policy - - options.apply_defaults timeout: @config.timeout, - metadata: @config.metadata, - retry_policy: @config.retry_policy - - @data_transfer_service_stub.get_transfer_config request, options do |result, operation| - yield result, operation if block_given? - return result - end - rescue ::Gapic::Rest::Error => e - raise ::Google::Cloud::Error.from_error(e) - end - - ## - # Returns information about all transfer configs owned by a project in the - # specified location. - # - # @overload list_transfer_configs(request, options = nil) - # Pass arguments to `list_transfer_configs` via a request object, either of type - # {::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferConfigsRequest} or an equivalent Hash. - # - # @param request [::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferConfigsRequest, ::Hash] - # A request object representing the call parameters. Required. To specify no - # parameters, or to keep all the default parameter values, pass an empty Hash. - # @param options [::Gapic::CallOptions, ::Hash] - # Overrides the default settings for this call, e.g, timeout, retries etc. Optional. - # - # @overload list_transfer_configs(parent: nil, data_source_ids: nil, page_token: nil, page_size: nil) - # Pass arguments to `list_transfer_configs` via keyword arguments. Note that at - # least one keyword argument is required. To specify no parameters, or to keep all - # the default parameter values, pass an empty Hash as a request object (see above). - # - # @param parent [::String] - # Required. The BigQuery project id for which transfer configs - # should be returned: `projects/{project_id}` or - # `projects/{project_id}/locations/{location_id}` - # @param data_source_ids [::Array<::String>] - # When specified, only configurations of requested data sources are returned. - # @param page_token [::String] - # Pagination token, which can be used to request a specific page - # of `ListTransfersRequest` list results. For multiple-page - # results, `ListTransfersResponse` outputs - # a `next_page` token, which can be used as the - # `page_token` value to request the next page of list results. - # @param page_size [::Integer] - # Page size. The default page size is the maximum value of 1000 results. - # @yield [result, operation] Access the result along with the TransportOperation object - # @yieldparam result [::Gapic::Rest::PagedEnumerable<::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig>] - # @yieldparam operation [::Gapic::Rest::TransportOperation] - # - # @return [::Gapic::Rest::PagedEnumerable<::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig>] - # - # @raise [::Google::Cloud::Error] if the REST call is aborted. - # - # @example Basic example - # require "google/cloud/bigquery/data_transfer/v1" - # - # # Create a client object. The client can be reused for multiple calls. - # client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::Client.new - # - # # Create a request. To set request fields, pass in keyword arguments. - # request = Google::Cloud::Bigquery::DataTransfer::V1::ListTransferConfigsRequest.new - # - # # Call the list_transfer_configs method. - # result = client.list_transfer_configs request - # - # # The returned object is of type Gapic::PagedEnumerable. 
You can iterate - # # over elements, and API calls will be issued to fetch pages as needed. - # result.each do |item| - # # Each element is of type ::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig. - # p item - # end - # - def list_transfer_configs request, options = nil - raise ::ArgumentError, "request must be provided" if request.nil? - - request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferConfigsRequest - - # Converts hash and nil to an options object - options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h - - # Customize the options with defaults - call_metadata = @config.rpcs.list_transfer_configs.metadata.to_h - - # Set x-goog-api-client, x-goog-user-project and x-goog-api-version headers - call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \ - lib_name: @config.lib_name, lib_version: @config.lib_version, - gapic_version: ::Google::Cloud::Bigquery::DataTransfer::V1::VERSION, - transports_version_send: [:rest] - - call_metadata[:"x-goog-api-version"] = API_VERSION unless API_VERSION.empty? - call_metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id - - options.apply_defaults timeout: @config.rpcs.list_transfer_configs.timeout, - metadata: call_metadata, - retry_policy: @config.rpcs.list_transfer_configs.retry_policy - - options.apply_defaults timeout: @config.timeout, - metadata: @config.metadata, - retry_policy: @config.retry_policy - - @data_transfer_service_stub.list_transfer_configs request, options do |result, operation| - result = ::Gapic::Rest::PagedEnumerable.new @data_transfer_service_stub, :list_transfer_configs, "transfer_configs", request, result, options - yield result, operation if block_given? - return result - end - rescue ::Gapic::Rest::Error => e - raise ::Google::Cloud::Error.from_error(e) - end - - ## - # Creates transfer runs for a time range [start_time, end_time]. - # For each date - or whatever granularity the data source supports - in the - # range, one transfer run is created. - # Note that runs are created per UTC time in the time range. - # DEPRECATED: use StartManualTransferRuns instead. - # - # @overload schedule_transfer_runs(request, options = nil) - # Pass arguments to `schedule_transfer_runs` via a request object, either of type - # {::Google::Cloud::Bigquery::DataTransfer::V1::ScheduleTransferRunsRequest} or an equivalent Hash. - # - # @param request [::Google::Cloud::Bigquery::DataTransfer::V1::ScheduleTransferRunsRequest, ::Hash] - # A request object representing the call parameters. Required. To specify no - # parameters, or to keep all the default parameter values, pass an empty Hash. - # @param options [::Gapic::CallOptions, ::Hash] - # Overrides the default settings for this call, e.g, timeout, retries etc. Optional. - # - # @overload schedule_transfer_runs(parent: nil, start_time: nil, end_time: nil) - # Pass arguments to `schedule_transfer_runs` via keyword arguments. Note that at - # least one keyword argument is required. To specify no parameters, or to keep all - # the default parameter values, pass an empty Hash as a request object (see above). - # - # @param parent [::String] - # Required. Transfer configuration name in the form: - # `projects/{project_id}/transferConfigs/{config_id}` or - # `projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}`. - # @param start_time [::Google::Protobuf::Timestamp, ::Hash] - # Required. Start time of the range of transfer runs. 
For example, - # `"2017-05-25T00:00:00+00:00"`. - # @param end_time [::Google::Protobuf::Timestamp, ::Hash] - # Required. End time of the range of transfer runs. For example, - # `"2017-05-30T00:00:00+00:00"`. - # @yield [result, operation] Access the result along with the TransportOperation object - # @yieldparam result [::Google::Cloud::Bigquery::DataTransfer::V1::ScheduleTransferRunsResponse] - # @yieldparam operation [::Gapic::Rest::TransportOperation] - # - # @return [::Google::Cloud::Bigquery::DataTransfer::V1::ScheduleTransferRunsResponse] - # - # @raise [::Google::Cloud::Error] if the REST call is aborted. - # - # @example Basic example - # require "google/cloud/bigquery/data_transfer/v1" - # - # # Create a client object. The client can be reused for multiple calls. - # client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::Client.new - # - # # Create a request. To set request fields, pass in keyword arguments. - # request = Google::Cloud::Bigquery::DataTransfer::V1::ScheduleTransferRunsRequest.new - # - # # Call the schedule_transfer_runs method. - # result = client.schedule_transfer_runs request - # - # # The returned object is of type Google::Cloud::Bigquery::DataTransfer::V1::ScheduleTransferRunsResponse. - # p result - # - def schedule_transfer_runs request, options = nil - raise ::ArgumentError, "request must be provided" if request.nil? - - request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigquery::DataTransfer::V1::ScheduleTransferRunsRequest - - # Converts hash and nil to an options object - options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h - - # Customize the options with defaults - call_metadata = @config.rpcs.schedule_transfer_runs.metadata.to_h - - # Set x-goog-api-client, x-goog-user-project and x-goog-api-version headers - call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \ - lib_name: @config.lib_name, lib_version: @config.lib_version, - gapic_version: ::Google::Cloud::Bigquery::DataTransfer::V1::VERSION, - transports_version_send: [:rest] - - call_metadata[:"x-goog-api-version"] = API_VERSION unless API_VERSION.empty? - call_metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id - - options.apply_defaults timeout: @config.rpcs.schedule_transfer_runs.timeout, - metadata: call_metadata, - retry_policy: @config.rpcs.schedule_transfer_runs.retry_policy - - options.apply_defaults timeout: @config.timeout, - metadata: @config.metadata, - retry_policy: @config.retry_policy - - @data_transfer_service_stub.schedule_transfer_runs request, options do |result, operation| - yield result, operation if block_given? - return result - end - rescue ::Gapic::Rest::Error => e - raise ::Google::Cloud::Error.from_error(e) - end - - ## - # Start manual transfer runs to be executed now with schedule_time equal to - # current time. The transfer runs can be created for a time range where the - # run_time is between start_time (inclusive) and end_time (exclusive), or for - # a specific run_time. - # - # @overload start_manual_transfer_runs(request, options = nil) - # Pass arguments to `start_manual_transfer_runs` via a request object, either of type - # {::Google::Cloud::Bigquery::DataTransfer::V1::StartManualTransferRunsRequest} or an equivalent Hash. - # - # @param request [::Google::Cloud::Bigquery::DataTransfer::V1::StartManualTransferRunsRequest, ::Hash] - # A request object representing the call parameters. Required. 
To specify no - # parameters, or to keep all the default parameter values, pass an empty Hash. - # @param options [::Gapic::CallOptions, ::Hash] - # Overrides the default settings for this call, e.g, timeout, retries etc. Optional. - # - # @overload start_manual_transfer_runs(parent: nil, requested_time_range: nil, requested_run_time: nil) - # Pass arguments to `start_manual_transfer_runs` via keyword arguments. Note that at - # least one keyword argument is required. To specify no parameters, or to keep all - # the default parameter values, pass an empty Hash as a request object (see above). - # - # @param parent [::String] - # Required. Transfer configuration name in the form: - # `projects/{project_id}/transferConfigs/{config_id}` or - # `projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}`. - # @param requested_time_range [::Google::Cloud::Bigquery::DataTransfer::V1::StartManualTransferRunsRequest::TimeRange, ::Hash] - # A time_range start and end timestamp for historical data files or reports - # that are scheduled to be transferred by the scheduled transfer run. - # requested_time_range must be a past time and cannot include future time - # values. - # @param requested_run_time [::Google::Protobuf::Timestamp, ::Hash] - # A run_time timestamp for historical data files or reports - # that are scheduled to be transferred by the scheduled transfer run. - # requested_run_time must be a past time and cannot include future time - # values. - # @yield [result, operation] Access the result along with the TransportOperation object - # @yieldparam result [::Google::Cloud::Bigquery::DataTransfer::V1::StartManualTransferRunsResponse] - # @yieldparam operation [::Gapic::Rest::TransportOperation] - # - # @return [::Google::Cloud::Bigquery::DataTransfer::V1::StartManualTransferRunsResponse] - # - # @raise [::Google::Cloud::Error] if the REST call is aborted. - # - # @example Basic example - # require "google/cloud/bigquery/data_transfer/v1" - # - # # Create a client object. The client can be reused for multiple calls. - # client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::Client.new - # - # # Create a request. To set request fields, pass in keyword arguments. - # request = Google::Cloud::Bigquery::DataTransfer::V1::StartManualTransferRunsRequest.new - # - # # Call the start_manual_transfer_runs method. - # result = client.start_manual_transfer_runs request - # - # # The returned object is of type Google::Cloud::Bigquery::DataTransfer::V1::StartManualTransferRunsResponse. - # p result - # - def start_manual_transfer_runs request, options = nil - raise ::ArgumentError, "request must be provided" if request.nil? - - request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigquery::DataTransfer::V1::StartManualTransferRunsRequest - - # Converts hash and nil to an options object - options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h - - # Customize the options with defaults - call_metadata = @config.rpcs.start_manual_transfer_runs.metadata.to_h - - # Set x-goog-api-client, x-goog-user-project and x-goog-api-version headers - call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \ - lib_name: @config.lib_name, lib_version: @config.lib_version, - gapic_version: ::Google::Cloud::Bigquery::DataTransfer::V1::VERSION, - transports_version_send: [:rest] - - call_metadata[:"x-goog-api-version"] = API_VERSION unless API_VERSION.empty? 
- call_metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id - - options.apply_defaults timeout: @config.rpcs.start_manual_transfer_runs.timeout, - metadata: call_metadata, - retry_policy: @config.rpcs.start_manual_transfer_runs.retry_policy - - options.apply_defaults timeout: @config.timeout, - metadata: @config.metadata, - retry_policy: @config.retry_policy - - @data_transfer_service_stub.start_manual_transfer_runs request, options do |result, operation| - yield result, operation if block_given? - return result - end - rescue ::Gapic::Rest::Error => e - raise ::Google::Cloud::Error.from_error(e) - end - - ## - # Returns information about the particular transfer run. - # - # @overload get_transfer_run(request, options = nil) - # Pass arguments to `get_transfer_run` via a request object, either of type - # {::Google::Cloud::Bigquery::DataTransfer::V1::GetTransferRunRequest} or an equivalent Hash. - # - # @param request [::Google::Cloud::Bigquery::DataTransfer::V1::GetTransferRunRequest, ::Hash] - # A request object representing the call parameters. Required. To specify no - # parameters, or to keep all the default parameter values, pass an empty Hash. - # @param options [::Gapic::CallOptions, ::Hash] - # Overrides the default settings for this call, e.g, timeout, retries etc. Optional. - # - # @overload get_transfer_run(name: nil) - # Pass arguments to `get_transfer_run` via keyword arguments. Note that at - # least one keyword argument is required. To specify no parameters, or to keep all - # the default parameter values, pass an empty Hash as a request object (see above). - # - # @param name [::String] - # Required. The field will contain name of the resource requested, for - # example: `projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}` - # or - # `projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}/runs/{run_id}` - # @yield [result, operation] Access the result along with the TransportOperation object - # @yieldparam result [::Google::Cloud::Bigquery::DataTransfer::V1::TransferRun] - # @yieldparam operation [::Gapic::Rest::TransportOperation] - # - # @return [::Google::Cloud::Bigquery::DataTransfer::V1::TransferRun] - # - # @raise [::Google::Cloud::Error] if the REST call is aborted. - # - # @example Basic example - # require "google/cloud/bigquery/data_transfer/v1" - # - # # Create a client object. The client can be reused for multiple calls. - # client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::Client.new - # - # # Create a request. To set request fields, pass in keyword arguments. - # request = Google::Cloud::Bigquery::DataTransfer::V1::GetTransferRunRequest.new - # - # # Call the get_transfer_run method. - # result = client.get_transfer_run request - # - # # The returned object is of type Google::Cloud::Bigquery::DataTransfer::V1::TransferRun. - # p result - # - def get_transfer_run request, options = nil - raise ::ArgumentError, "request must be provided" if request.nil? - - request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigquery::DataTransfer::V1::GetTransferRunRequest - - # Converts hash and nil to an options object - options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? 
:to_h - - # Customize the options with defaults - call_metadata = @config.rpcs.get_transfer_run.metadata.to_h - - # Set x-goog-api-client, x-goog-user-project and x-goog-api-version headers - call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \ - lib_name: @config.lib_name, lib_version: @config.lib_version, - gapic_version: ::Google::Cloud::Bigquery::DataTransfer::V1::VERSION, - transports_version_send: [:rest] - - call_metadata[:"x-goog-api-version"] = API_VERSION unless API_VERSION.empty? - call_metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id - - options.apply_defaults timeout: @config.rpcs.get_transfer_run.timeout, - metadata: call_metadata, - retry_policy: @config.rpcs.get_transfer_run.retry_policy - - options.apply_defaults timeout: @config.timeout, - metadata: @config.metadata, - retry_policy: @config.retry_policy - - @data_transfer_service_stub.get_transfer_run request, options do |result, operation| - yield result, operation if block_given? - return result - end - rescue ::Gapic::Rest::Error => e - raise ::Google::Cloud::Error.from_error(e) - end - - ## - # Deletes the specified transfer run. - # - # @overload delete_transfer_run(request, options = nil) - # Pass arguments to `delete_transfer_run` via a request object, either of type - # {::Google::Cloud::Bigquery::DataTransfer::V1::DeleteTransferRunRequest} or an equivalent Hash. - # - # @param request [::Google::Cloud::Bigquery::DataTransfer::V1::DeleteTransferRunRequest, ::Hash] - # A request object representing the call parameters. Required. To specify no - # parameters, or to keep all the default parameter values, pass an empty Hash. - # @param options [::Gapic::CallOptions, ::Hash] - # Overrides the default settings for this call, e.g, timeout, retries etc. Optional. - # - # @overload delete_transfer_run(name: nil) - # Pass arguments to `delete_transfer_run` via keyword arguments. Note that at - # least one keyword argument is required. To specify no parameters, or to keep all - # the default parameter values, pass an empty Hash as a request object (see above). - # - # @param name [::String] - # Required. The field will contain name of the resource requested, for - # example: `projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}` - # or - # `projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}/runs/{run_id}` - # @yield [result, operation] Access the result along with the TransportOperation object - # @yieldparam result [::Google::Protobuf::Empty] - # @yieldparam operation [::Gapic::Rest::TransportOperation] - # - # @return [::Google::Protobuf::Empty] - # - # @raise [::Google::Cloud::Error] if the REST call is aborted. - # - # @example Basic example - # require "google/cloud/bigquery/data_transfer/v1" - # - # # Create a client object. The client can be reused for multiple calls. - # client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::Client.new - # - # # Create a request. To set request fields, pass in keyword arguments. - # request = Google::Cloud::Bigquery::DataTransfer::V1::DeleteTransferRunRequest.new - # - # # Call the delete_transfer_run method. - # result = client.delete_transfer_run request - # - # # The returned object is of type Google::Protobuf::Empty. - # p result - # - def delete_transfer_run request, options = nil - raise ::ArgumentError, "request must be provided" if request.nil? 
- - request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigquery::DataTransfer::V1::DeleteTransferRunRequest - - # Converts hash and nil to an options object - options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h - - # Customize the options with defaults - call_metadata = @config.rpcs.delete_transfer_run.metadata.to_h - - # Set x-goog-api-client, x-goog-user-project and x-goog-api-version headers - call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \ - lib_name: @config.lib_name, lib_version: @config.lib_version, - gapic_version: ::Google::Cloud::Bigquery::DataTransfer::V1::VERSION, - transports_version_send: [:rest] - - call_metadata[:"x-goog-api-version"] = API_VERSION unless API_VERSION.empty? - call_metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id - - options.apply_defaults timeout: @config.rpcs.delete_transfer_run.timeout, - metadata: call_metadata, - retry_policy: @config.rpcs.delete_transfer_run.retry_policy - - options.apply_defaults timeout: @config.timeout, - metadata: @config.metadata, - retry_policy: @config.retry_policy - - @data_transfer_service_stub.delete_transfer_run request, options do |result, operation| - yield result, operation if block_given? - return result - end - rescue ::Gapic::Rest::Error => e - raise ::Google::Cloud::Error.from_error(e) - end - - ## - # Returns information about running and completed transfer runs. - # - # @overload list_transfer_runs(request, options = nil) - # Pass arguments to `list_transfer_runs` via a request object, either of type - # {::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferRunsRequest} or an equivalent Hash. - # - # @param request [::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferRunsRequest, ::Hash] - # A request object representing the call parameters. Required. To specify no - # parameters, or to keep all the default parameter values, pass an empty Hash. - # @param options [::Gapic::CallOptions, ::Hash] - # Overrides the default settings for this call, e.g, timeout, retries etc. Optional. - # - # @overload list_transfer_runs(parent: nil, states: nil, page_token: nil, page_size: nil, run_attempt: nil) - # Pass arguments to `list_transfer_runs` via keyword arguments. Note that at - # least one keyword argument is required. To specify no parameters, or to keep all - # the default parameter values, pass an empty Hash as a request object (see above). - # - # @param parent [::String] - # Required. Name of transfer configuration for which transfer runs should be - # retrieved. Format of transfer configuration resource name is: - # `projects/{project_id}/transferConfigs/{config_id}` or - # `projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}`. - # @param states [::Array<::Google::Cloud::Bigquery::DataTransfer::V1::TransferState>] - # When specified, only transfer runs with requested states are returned. - # @param page_token [::String] - # Pagination token, which can be used to request a specific page - # of `ListTransferRunsRequest` list results. For multiple-page - # results, `ListTransferRunsResponse` outputs - # a `next_page` token, which can be used as the - # `page_token` value to request the next page of list results. - # @param page_size [::Integer] - # Page size. The default page size is the maximum value of 1000 results. - # @param run_attempt [::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferRunsRequest::RunAttempt] - # Indicates how run attempts are to be pulled. 
- # @yield [result, operation] Access the result along with the TransportOperation object - # @yieldparam result [::Gapic::Rest::PagedEnumerable<::Google::Cloud::Bigquery::DataTransfer::V1::TransferRun>] - # @yieldparam operation [::Gapic::Rest::TransportOperation] - # - # @return [::Gapic::Rest::PagedEnumerable<::Google::Cloud::Bigquery::DataTransfer::V1::TransferRun>] - # - # @raise [::Google::Cloud::Error] if the REST call is aborted. - # - # @example Basic example - # require "google/cloud/bigquery/data_transfer/v1" - # - # # Create a client object. The client can be reused for multiple calls. - # client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::Client.new - # - # # Create a request. To set request fields, pass in keyword arguments. - # request = Google::Cloud::Bigquery::DataTransfer::V1::ListTransferRunsRequest.new - # - # # Call the list_transfer_runs method. - # result = client.list_transfer_runs request - # - # # The returned object is of type Gapic::PagedEnumerable. You can iterate - # # over elements, and API calls will be issued to fetch pages as needed. - # result.each do |item| - # # Each element is of type ::Google::Cloud::Bigquery::DataTransfer::V1::TransferRun. - # p item - # end - # - def list_transfer_runs request, options = nil - raise ::ArgumentError, "request must be provided" if request.nil? - - request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferRunsRequest - - # Converts hash and nil to an options object - options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h - - # Customize the options with defaults - call_metadata = @config.rpcs.list_transfer_runs.metadata.to_h - - # Set x-goog-api-client, x-goog-user-project and x-goog-api-version headers - call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \ - lib_name: @config.lib_name, lib_version: @config.lib_version, - gapic_version: ::Google::Cloud::Bigquery::DataTransfer::V1::VERSION, - transports_version_send: [:rest] - - call_metadata[:"x-goog-api-version"] = API_VERSION unless API_VERSION.empty? - call_metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id - - options.apply_defaults timeout: @config.rpcs.list_transfer_runs.timeout, - metadata: call_metadata, - retry_policy: @config.rpcs.list_transfer_runs.retry_policy - - options.apply_defaults timeout: @config.timeout, - metadata: @config.metadata, - retry_policy: @config.retry_policy - - @data_transfer_service_stub.list_transfer_runs request, options do |result, operation| - result = ::Gapic::Rest::PagedEnumerable.new @data_transfer_service_stub, :list_transfer_runs, "transfer_runs", request, result, options - yield result, operation if block_given? - return result - end - rescue ::Gapic::Rest::Error => e - raise ::Google::Cloud::Error.from_error(e) - end - - ## - # Returns log messages for the transfer run. - # - # @overload list_transfer_logs(request, options = nil) - # Pass arguments to `list_transfer_logs` via a request object, either of type - # {::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferLogsRequest} or an equivalent Hash. - # - # @param request [::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferLogsRequest, ::Hash] - # A request object representing the call parameters. Required. To specify no - # parameters, or to keep all the default parameter values, pass an empty Hash. 
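Both `list_transfer_runs` above and `list_transfer_logs` below return a `::Gapic::Rest::PagedEnumerable`; a short sketch of page-wise iteration, with a placeholder parent name:

require "google/cloud/bigquery/data_transfer/v1"

client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::Client.new

result = client.list_transfer_runs parent: "projects/my-project/transferConfigs/my-config",
                                   page_size: 100

# each_page walks the pages explicitly, issuing one API call per page;
# a plain each would iterate elements and fetch pages transparently.
result.each_page do |page|
  page.each { |run| p run.name }
end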
- # @param options [::Gapic::CallOptions, ::Hash] - # Overrides the default settings for this call, e.g, timeout, retries etc. Optional. - # - # @overload list_transfer_logs(parent: nil, page_token: nil, page_size: nil, message_types: nil) - # Pass arguments to `list_transfer_logs` via keyword arguments. Note that at - # least one keyword argument is required. To specify no parameters, or to keep all - # the default parameter values, pass an empty Hash as a request object (see above). - # - # @param parent [::String] - # Required. Transfer run name in the form: - # `projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}` or - # `projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}/runs/{run_id}` - # @param page_token [::String] - # Pagination token, which can be used to request a specific page - # of `ListTransferLogsRequest` list results. For multiple-page - # results, `ListTransferLogsResponse` outputs - # a `next_page` token, which can be used as the - # `page_token` value to request the next page of list results. - # @param page_size [::Integer] - # Page size. The default page size is the maximum value of 1000 results. - # @param message_types [::Array<::Google::Cloud::Bigquery::DataTransfer::V1::TransferMessage::MessageSeverity>] - # Message types to return. If not populated - INFO, WARNING and ERROR - # messages are returned. - # @yield [result, operation] Access the result along with the TransportOperation object - # @yieldparam result [::Gapic::Rest::PagedEnumerable<::Google::Cloud::Bigquery::DataTransfer::V1::TransferMessage>] - # @yieldparam operation [::Gapic::Rest::TransportOperation] - # - # @return [::Gapic::Rest::PagedEnumerable<::Google::Cloud::Bigquery::DataTransfer::V1::TransferMessage>] - # - # @raise [::Google::Cloud::Error] if the REST call is aborted. - # - # @example Basic example - # require "google/cloud/bigquery/data_transfer/v1" - # - # # Create a client object. The client can be reused for multiple calls. - # client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::Client.new - # - # # Create a request. To set request fields, pass in keyword arguments. - # request = Google::Cloud::Bigquery::DataTransfer::V1::ListTransferLogsRequest.new - # - # # Call the list_transfer_logs method. - # result = client.list_transfer_logs request - # - # # The returned object is of type Gapic::PagedEnumerable. You can iterate - # # over elements, and API calls will be issued to fetch pages as needed. - # result.each do |item| - # # Each element is of type ::Google::Cloud::Bigquery::DataTransfer::V1::TransferMessage. - # p item - # end - # - def list_transfer_logs request, options = nil - raise ::ArgumentError, "request must be provided" if request.nil? - - request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferLogsRequest - - # Converts hash and nil to an options object - options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h - - # Customize the options with defaults - call_metadata = @config.rpcs.list_transfer_logs.metadata.to_h - - # Set x-goog-api-client, x-goog-user-project and x-goog-api-version headers - call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \ - lib_name: @config.lib_name, lib_version: @config.lib_version, - gapic_version: ::Google::Cloud::Bigquery::DataTransfer::V1::VERSION, - transports_version_send: [:rest] - - call_metadata[:"x-goog-api-version"] = API_VERSION unless API_VERSION.empty? 
- call_metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id - - options.apply_defaults timeout: @config.rpcs.list_transfer_logs.timeout, - metadata: call_metadata, - retry_policy: @config.rpcs.list_transfer_logs.retry_policy - - options.apply_defaults timeout: @config.timeout, - metadata: @config.metadata, - retry_policy: @config.retry_policy - - @data_transfer_service_stub.list_transfer_logs request, options do |result, operation| - result = ::Gapic::Rest::PagedEnumerable.new @data_transfer_service_stub, :list_transfer_logs, "transfer_messages", request, result, options - yield result, operation if block_given? - return result - end - rescue ::Gapic::Rest::Error => e - raise ::Google::Cloud::Error.from_error(e) - end - - ## - # Returns true if valid credentials exist for the given data source and - # requesting user. - # - # @overload check_valid_creds(request, options = nil) - # Pass arguments to `check_valid_creds` via a request object, either of type - # {::Google::Cloud::Bigquery::DataTransfer::V1::CheckValidCredsRequest} or an equivalent Hash. - # - # @param request [::Google::Cloud::Bigquery::DataTransfer::V1::CheckValidCredsRequest, ::Hash] - # A request object representing the call parameters. Required. To specify no - # parameters, or to keep all the default parameter values, pass an empty Hash. - # @param options [::Gapic::CallOptions, ::Hash] - # Overrides the default settings for this call, e.g, timeout, retries etc. Optional. - # - # @overload check_valid_creds(name: nil) - # Pass arguments to `check_valid_creds` via keyword arguments. Note that at - # least one keyword argument is required. To specify no parameters, or to keep all - # the default parameter values, pass an empty Hash as a request object (see above). - # - # @param name [::String] - # Required. The data source in the form: - # `projects/{project_id}/dataSources/{data_source_id}` or - # `projects/{project_id}/locations/{location_id}/dataSources/{data_source_id}`. - # @yield [result, operation] Access the result along with the TransportOperation object - # @yieldparam result [::Google::Cloud::Bigquery::DataTransfer::V1::CheckValidCredsResponse] - # @yieldparam operation [::Gapic::Rest::TransportOperation] - # - # @return [::Google::Cloud::Bigquery::DataTransfer::V1::CheckValidCredsResponse] - # - # @raise [::Google::Cloud::Error] if the REST call is aborted. - # - # @example Basic example - # require "google/cloud/bigquery/data_transfer/v1" - # - # # Create a client object. The client can be reused for multiple calls. - # client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::Client.new - # - # # Create a request. To set request fields, pass in keyword arguments. - # request = Google::Cloud::Bigquery::DataTransfer::V1::CheckValidCredsRequest.new - # - # # Call the check_valid_creds method. - # result = client.check_valid_creds request - # - # # The returned object is of type Google::Cloud::Bigquery::DataTransfer::V1::CheckValidCredsResponse. - # p result - # - def check_valid_creds request, options = nil - raise ::ArgumentError, "request must be provided" if request.nil? - - request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigquery::DataTransfer::V1::CheckValidCredsRequest - - # Converts hash and nil to an options object - options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? 
:to_h - - # Customize the options with defaults - call_metadata = @config.rpcs.check_valid_creds.metadata.to_h - - # Set x-goog-api-client, x-goog-user-project and x-goog-api-version headers - call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \ - lib_name: @config.lib_name, lib_version: @config.lib_version, - gapic_version: ::Google::Cloud::Bigquery::DataTransfer::V1::VERSION, - transports_version_send: [:rest] - - call_metadata[:"x-goog-api-version"] = API_VERSION unless API_VERSION.empty? - call_metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id - - options.apply_defaults timeout: @config.rpcs.check_valid_creds.timeout, - metadata: call_metadata, - retry_policy: @config.rpcs.check_valid_creds.retry_policy - - options.apply_defaults timeout: @config.timeout, - metadata: @config.metadata, - retry_policy: @config.retry_policy - - @data_transfer_service_stub.check_valid_creds request, options do |result, operation| - yield result, operation if block_given? - return result - end - rescue ::Gapic::Rest::Error => e - raise ::Google::Cloud::Error.from_error(e) - end - - ## - # Enroll data sources in a user project. This allows users to create transfer - # configurations for these data sources. They will also appear in the - # ListDataSources RPC and as such, will appear in the - # [BigQuery UI](https://console.cloud.google.com/bigquery), and the documents - # can be found in the public guide for - # [BigQuery Web UI](https://cloud.google.com/bigquery/bigquery-web-ui) and - # [Data Transfer - # Service](https://cloud.google.com/bigquery/docs/working-with-transfers). - # - # @overload enroll_data_sources(request, options = nil) - # Pass arguments to `enroll_data_sources` via a request object, either of type - # {::Google::Cloud::Bigquery::DataTransfer::V1::EnrollDataSourcesRequest} or an equivalent Hash. - # - # @param request [::Google::Cloud::Bigquery::DataTransfer::V1::EnrollDataSourcesRequest, ::Hash] - # A request object representing the call parameters. Required. To specify no - # parameters, or to keep all the default parameter values, pass an empty Hash. - # @param options [::Gapic::CallOptions, ::Hash] - # Overrides the default settings for this call, e.g, timeout, retries etc. Optional. - # - # @overload enroll_data_sources(name: nil, data_source_ids: nil) - # Pass arguments to `enroll_data_sources` via keyword arguments. Note that at - # least one keyword argument is required. To specify no parameters, or to keep all - # the default parameter values, pass an empty Hash as a request object (see above). - # - # @param name [::String] - # Required. The name of the project resource in the form: - # `projects/{project_id}` - # @param data_source_ids [::Array<::String>] - # Data sources that are enrolled. It is required to provide at least one - # data source id. - # @yield [result, operation] Access the result along with the TransportOperation object - # @yieldparam result [::Google::Protobuf::Empty] - # @yieldparam operation [::Gapic::Rest::TransportOperation] - # - # @return [::Google::Protobuf::Empty] - # - # @raise [::Google::Cloud::Error] if the REST call is aborted. - # - # @example Basic example - # require "google/cloud/bigquery/data_transfer/v1" - # - # # Create a client object. The client can be reused for multiple calls. - # client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::Client.new - # - # # Create a request. To set request fields, pass in keyword arguments. 
-                #   request = Google::Cloud::Bigquery::DataTransfer::V1::EnrollDataSourcesRequest.new
-                #
-                #   # Call the enroll_data_sources method.
-                #   result = client.enroll_data_sources request
-                #
-                #   # The returned object is of type Google::Protobuf::Empty.
-                #   p result
-                #
-                def enroll_data_sources request, options = nil
-                  raise ::ArgumentError, "request must be provided" if request.nil?
-
-                  request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigquery::DataTransfer::V1::EnrollDataSourcesRequest
-
-                  # Converts hash and nil to an options object
-                  options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h
-
-                  # Customize the options with defaults
-                  call_metadata = @config.rpcs.enroll_data_sources.metadata.to_h
-
-                  # Set x-goog-api-client, x-goog-user-project and x-goog-api-version headers
-                  call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
-                    lib_name: @config.lib_name, lib_version: @config.lib_version,
-                    gapic_version: ::Google::Cloud::Bigquery::DataTransfer::V1::VERSION,
-                    transports_version_send: [:rest]
-
-                  call_metadata[:"x-goog-api-version"] = API_VERSION unless API_VERSION.empty?
-                  call_metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id
-
-                  options.apply_defaults timeout: @config.rpcs.enroll_data_sources.timeout,
-                                         metadata: call_metadata,
-                                         retry_policy: @config.rpcs.enroll_data_sources.retry_policy
-
-                  options.apply_defaults timeout: @config.timeout,
-                                         metadata: @config.metadata,
-                                         retry_policy: @config.retry_policy
-
-                  @data_transfer_service_stub.enroll_data_sources request, options do |result, operation|
-                    yield result, operation if block_given?
-                    return result
-                  end
-                rescue ::Gapic::Rest::Error => e
-                  raise ::Google::Cloud::Error.from_error(e)
-                end
-
-                ##
-                # Unenroll data sources in a user project. This allows users to remove
-                # transfer configurations for these data sources. They will no longer appear
-                # in the ListDataSources RPC and will also no longer appear in the [BigQuery
-                # UI](https://console.cloud.google.com/bigquery). Data transfer
-                # configurations of unenrolled data sources will not be scheduled.
-                #
-                # @overload unenroll_data_sources(request, options = nil)
-                #   Pass arguments to `unenroll_data_sources` via a request object, either of type
-                #   {::Google::Cloud::Bigquery::DataTransfer::V1::UnenrollDataSourcesRequest} or an equivalent Hash.
-                #
-                #   @param request [::Google::Cloud::Bigquery::DataTransfer::V1::UnenrollDataSourcesRequest, ::Hash]
-                #     A request object representing the call parameters. Required. To specify no
-                #     parameters, or to keep all the default parameter values, pass an empty Hash.
-                #   @param options [::Gapic::CallOptions, ::Hash]
-                #     Overrides the default settings for this call, e.g., timeout, retries, etc. Optional.
-                #
-                # @overload unenroll_data_sources(name: nil, data_source_ids: nil)
-                #   Pass arguments to `unenroll_data_sources` via keyword arguments. Note that at
-                #   least one keyword argument is required. To specify no parameters, or to keep all
-                #   the default parameter values, pass an empty Hash as a request object (see above).
-                #
-                #   @param name [::String]
-                #     Required. The name of the project resource in the form:
-                #     `projects/{project_id}`
-                #   @param data_source_ids [::Array<::String>]
-                #     Data sources that are unenrolled. It is required to provide at least one
-                #     data source id.
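Taken together with `enroll_data_sources` above, a minimal sketch of the enroll/unenroll round trip; the project name and the `"scheduled_query"` data source id are placeholders:

require "google/cloud/bigquery/data_transfer/v1"

client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::Client.new

# Both calls return Google::Protobuf::Empty; at least one data source id
# is required in each request.
client.enroll_data_sources name: "projects/my-project",
                           data_source_ids: ["scheduled_query"]

client.unenroll_data_sources name: "projects/my-project",
                             data_source_ids: ["scheduled_query"]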
- # @yield [result, operation] Access the result along with the TransportOperation object - # @yieldparam result [::Google::Protobuf::Empty] - # @yieldparam operation [::Gapic::Rest::TransportOperation] - # - # @return [::Google::Protobuf::Empty] - # - # @raise [::Google::Cloud::Error] if the REST call is aborted. - # - # @example Basic example - # require "google/cloud/bigquery/data_transfer/v1" - # - # # Create a client object. The client can be reused for multiple calls. - # client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::Client.new - # - # # Create a request. To set request fields, pass in keyword arguments. - # request = Google::Cloud::Bigquery::DataTransfer::V1::UnenrollDataSourcesRequest.new - # - # # Call the unenroll_data_sources method. - # result = client.unenroll_data_sources request - # - # # The returned object is of type Google::Protobuf::Empty. - # p result - # - def unenroll_data_sources request, options = nil - raise ::ArgumentError, "request must be provided" if request.nil? - - request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigquery::DataTransfer::V1::UnenrollDataSourcesRequest - - # Converts hash and nil to an options object - options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h - - # Customize the options with defaults - call_metadata = @config.rpcs.unenroll_data_sources.metadata.to_h - - # Set x-goog-api-client, x-goog-user-project and x-goog-api-version headers - call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \ - lib_name: @config.lib_name, lib_version: @config.lib_version, - gapic_version: ::Google::Cloud::Bigquery::DataTransfer::V1::VERSION, - transports_version_send: [:rest] - - call_metadata[:"x-goog-api-version"] = API_VERSION unless API_VERSION.empty? - call_metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id - - options.apply_defaults timeout: @config.rpcs.unenroll_data_sources.timeout, - metadata: call_metadata, - retry_policy: @config.rpcs.unenroll_data_sources.retry_policy - - options.apply_defaults timeout: @config.timeout, - metadata: @config.metadata, - retry_policy: @config.retry_policy - - @data_transfer_service_stub.unenroll_data_sources request, options do |result, operation| - yield result, operation if block_given? - return result - end - rescue ::Gapic::Rest::Error => e - raise ::Google::Cloud::Error.from_error(e) - end - - ## - # Configuration class for the DataTransferService REST API. - # - # This class represents the configuration for DataTransferService REST, - # providing control over timeouts, retry behavior, logging, transport - # parameters, and other low-level controls. Certain parameters can also be - # applied individually to specific RPCs. See - # {::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::Client::Configuration::Rpcs} - # for a list of RPCs that can be configured independently. - # - # Configuration can be applied globally to all clients, or to a single client - # on construction. - # - # @example - # - # # Modify the global config, setting the timeout for - # # get_data_source to 20 seconds, - # # and all remaining timeouts to 10 seconds. - # ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::Client.configure do |config| - # config.timeout = 10.0 - # config.rpcs.get_data_source.timeout = 20.0 - # end - # - # # Apply the above configuration only to a new client. 
- # client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::Client.new do |config| - # config.timeout = 10.0 - # config.rpcs.get_data_source.timeout = 20.0 - # end - # - # @!attribute [rw] endpoint - # A custom service endpoint, as a hostname or hostname:port. The default is - # nil, indicating to use the default endpoint in the current universe domain. - # @return [::String,nil] - # @!attribute [rw] credentials - # Credentials to send with calls. You may provide any of the following types: - # * (`String`) The path to a service account key file in JSON format - # * (`Hash`) A service account key as a Hash - # * (`Google::Auth::Credentials`) A googleauth credentials object - # (see the [googleauth docs](https://rubydoc.info/gems/googleauth/Google/Auth/Credentials)) - # * (`Signet::OAuth2::Client`) A signet oauth2 client object - # (see the [signet docs](https://rubydoc.info/gems/signet/Signet/OAuth2/Client)) - # * (`nil`) indicating no credentials - # @return [::Object] - # @!attribute [rw] scope - # The OAuth scopes - # @return [::Array<::String>] - # @!attribute [rw] lib_name - # The library name as recorded in instrumentation and logging - # @return [::String] - # @!attribute [rw] lib_version - # The library version as recorded in instrumentation and logging - # @return [::String] - # @!attribute [rw] timeout - # The call timeout in seconds. - # @return [::Numeric] - # @!attribute [rw] metadata - # Additional headers to be sent with the call. - # @return [::Hash{::Symbol=>::String}] - # @!attribute [rw] retry_policy - # The retry policy. The value is a hash with the following keys: - # * `:initial_delay` (*type:* `Numeric`) - The initial delay in seconds. - # * `:max_delay` (*type:* `Numeric`) - The max delay in seconds. - # * `:multiplier` (*type:* `Numeric`) - The incremental backoff multiplier. - # * `:retry_codes` (*type:* `Array`) - The error codes that should - # trigger a retry. - # @return [::Hash] - # @!attribute [rw] quota_project - # A separate project against which to charge quota. - # @return [::String] - # @!attribute [rw] universe_domain - # The universe domain within which to make requests. This determines the - # default endpoint URL. The default value of nil uses the environment - # universe (usually the default "googleapis.com" universe). - # @return [::String,nil] - # - class Configuration - extend ::Gapic::Config - - # @private - # The endpoint specific to the default "googleapis.com" universe. Deprecated. - DEFAULT_ENDPOINT = "bigquerydatatransfer.googleapis.com" - - config_attr :endpoint, nil, ::String, nil - config_attr :credentials, nil do |value| - allowed = [::String, ::Hash, ::Proc, ::Symbol, ::Google::Auth::Credentials, ::Signet::OAuth2::Client, nil] - allowed.any? { |klass| klass === value } - end - config_attr :scope, nil, ::String, ::Array, nil - config_attr :lib_name, nil, ::String, nil - config_attr :lib_version, nil, ::String, nil - config_attr :timeout, nil, ::Numeric, nil - config_attr :metadata, nil, ::Hash, nil - config_attr :retry_policy, nil, ::Hash, ::Proc, nil - config_attr :quota_project, nil, ::String, nil - config_attr :universe_domain, nil, ::String, nil - - # @private - # Overrides for http bindings for the RPCs of this service - # are only used when this service is used as mixin, and only - # by the host service. 
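To make the attribute list above concrete, here is a sketch of a global configuration that combines several of the documented fields; the key file path and all numeric values are placeholders:

require "google/cloud/bigquery/data_transfer/v1"

# Hypothetical values throughout; any field left unset keeps its default.
Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::Client.configure do |config|
  config.credentials  = "/path/to/keyfile.json"  # service account key file
  config.timeout      = 30.0                     # seconds, applied to every RPC
  config.retry_policy = {
    initial_delay: 1.0,    # first backoff delay in seconds
    max_delay:     10.0,   # backoff ceiling in seconds
    multiplier:    1.3,    # growth factor between retries
    retry_codes:   [4, 14] # gRPC codes: DEADLINE_EXCEEDED, UNAVAILABLE
  }
end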
- # @return [::Hash{::Symbol=>::Array<::Gapic::Rest::GrpcTranscoder::HttpBinding>}]
- config_attr :bindings_override, {}, ::Hash, nil
-
- # @private
- def initialize parent_config = nil
- @parent_config = parent_config unless parent_config.nil?
-
- yield self if block_given?
- end
-
- ##
- # Configurations for individual RPCs
- # @return [Rpcs]
- #
- def rpcs
- @rpcs ||= begin
- parent_rpcs = nil
- parent_rpcs = @parent_config.rpcs if defined?(@parent_config) && @parent_config.respond_to?(:rpcs)
- Rpcs.new parent_rpcs
- end
- end
-
- ##
- # Configuration RPC class for the DataTransferService API.
- #
- # Includes fields providing the configuration for each RPC in this service.
- # Each configuration object is of type `Gapic::Config::Method` and includes
- # the following configuration fields:
- #
- # * `timeout` (*type:* `Numeric`) - The call timeout in seconds
- # * `metadata` (*type:* `Hash{Symbol=>String}`) - Additional headers
- # * `retry_policy` (*type:* `Hash`) - The retry policy. The policy fields
- # include the following keys:
- # * `:initial_delay` (*type:* `Numeric`) - The initial delay in seconds.
- # * `:max_delay` (*type:* `Numeric`) - The max delay in seconds.
- # * `:multiplier` (*type:* `Numeric`) - The incremental backoff multiplier.
- # * `:retry_codes` (*type:* `Array`) - The error codes that should
- # trigger a retry.
- #
- class Rpcs
- ##
- # RPC-specific configuration for `get_data_source`
- # @return [::Gapic::Config::Method]
- #
- attr_reader :get_data_source
- ##
- # RPC-specific configuration for `list_data_sources`
- # @return [::Gapic::Config::Method]
- #
- attr_reader :list_data_sources
- ##
- # RPC-specific configuration for `create_transfer_config`
- # @return [::Gapic::Config::Method]
- #
- attr_reader :create_transfer_config
- ##
- # RPC-specific configuration for `update_transfer_config`
- # @return [::Gapic::Config::Method]
- #
- attr_reader :update_transfer_config
- ##
- # RPC-specific configuration for `delete_transfer_config`
- # @return [::Gapic::Config::Method]
- #
- attr_reader :delete_transfer_config
- ##
- # RPC-specific configuration for `get_transfer_config`
- # @return [::Gapic::Config::Method]
- #
- attr_reader :get_transfer_config
- ##
- # RPC-specific configuration for `list_transfer_configs`
- # @return [::Gapic::Config::Method]
- #
- attr_reader :list_transfer_configs
- ##
- # RPC-specific configuration for `schedule_transfer_runs`
- # @return [::Gapic::Config::Method]
- #
- attr_reader :schedule_transfer_runs
- ##
- # RPC-specific configuration for `start_manual_transfer_runs`
- # @return [::Gapic::Config::Method]
- #
- attr_reader :start_manual_transfer_runs
- ##
- # RPC-specific configuration for `get_transfer_run`
- # @return [::Gapic::Config::Method]
- #
- attr_reader :get_transfer_run
- ##
- # RPC-specific configuration for `delete_transfer_run`
- # @return [::Gapic::Config::Method]
- #
- attr_reader :delete_transfer_run
- ##
- # RPC-specific configuration for `list_transfer_runs`
- # @return [::Gapic::Config::Method]
- #
- attr_reader :list_transfer_runs
- ##
- # RPC-specific configuration for `list_transfer_logs`
- # @return [::Gapic::Config::Method]
- #
- attr_reader :list_transfer_logs
- ##
- # RPC-specific configuration for `check_valid_creds`
- # @return [::Gapic::Config::Method]
- #
- attr_reader :check_valid_creds
- ##
- # RPC-specific configuration for `enroll_data_sources`
- # @return [::Gapic::Config::Method]
- #
- attr_reader :enroll_data_sources
- ##
- # RPC-specific configuration for `unenroll_data_sources`
- # @return [::Gapic::Config::Method]
- #
- attr_reader :unenroll_data_sources
-
- # @private
- def initialize parent_rpcs = nil
- get_data_source_config = parent_rpcs.get_data_source if parent_rpcs.respond_to? :get_data_source
- @get_data_source = ::Gapic::Config::Method.new get_data_source_config
- list_data_sources_config = parent_rpcs.list_data_sources if parent_rpcs.respond_to? :list_data_sources
- @list_data_sources = ::Gapic::Config::Method.new list_data_sources_config
- create_transfer_config_config = parent_rpcs.create_transfer_config if parent_rpcs.respond_to? :create_transfer_config
- @create_transfer_config = ::Gapic::Config::Method.new create_transfer_config_config
- update_transfer_config_config = parent_rpcs.update_transfer_config if parent_rpcs.respond_to? :update_transfer_config
- @update_transfer_config = ::Gapic::Config::Method.new update_transfer_config_config
- delete_transfer_config_config = parent_rpcs.delete_transfer_config if parent_rpcs.respond_to? :delete_transfer_config
- @delete_transfer_config = ::Gapic::Config::Method.new delete_transfer_config_config
- get_transfer_config_config = parent_rpcs.get_transfer_config if parent_rpcs.respond_to? :get_transfer_config
- @get_transfer_config = ::Gapic::Config::Method.new get_transfer_config_config
- list_transfer_configs_config = parent_rpcs.list_transfer_configs if parent_rpcs.respond_to? :list_transfer_configs
- @list_transfer_configs = ::Gapic::Config::Method.new list_transfer_configs_config
- schedule_transfer_runs_config = parent_rpcs.schedule_transfer_runs if parent_rpcs.respond_to? :schedule_transfer_runs
- @schedule_transfer_runs = ::Gapic::Config::Method.new schedule_transfer_runs_config
- start_manual_transfer_runs_config = parent_rpcs.start_manual_transfer_runs if parent_rpcs.respond_to? :start_manual_transfer_runs
- @start_manual_transfer_runs = ::Gapic::Config::Method.new start_manual_transfer_runs_config
- get_transfer_run_config = parent_rpcs.get_transfer_run if parent_rpcs.respond_to? :get_transfer_run
- @get_transfer_run = ::Gapic::Config::Method.new get_transfer_run_config
- delete_transfer_run_config = parent_rpcs.delete_transfer_run if parent_rpcs.respond_to? :delete_transfer_run
- @delete_transfer_run = ::Gapic::Config::Method.new delete_transfer_run_config
- list_transfer_runs_config = parent_rpcs.list_transfer_runs if parent_rpcs.respond_to? :list_transfer_runs
- @list_transfer_runs = ::Gapic::Config::Method.new list_transfer_runs_config
- list_transfer_logs_config = parent_rpcs.list_transfer_logs if parent_rpcs.respond_to? :list_transfer_logs
- @list_transfer_logs = ::Gapic::Config::Method.new list_transfer_logs_config
- check_valid_creds_config = parent_rpcs.check_valid_creds if parent_rpcs.respond_to? :check_valid_creds
- @check_valid_creds = ::Gapic::Config::Method.new check_valid_creds_config
- enroll_data_sources_config = parent_rpcs.enroll_data_sources if parent_rpcs.respond_to? :enroll_data_sources
- @enroll_data_sources = ::Gapic::Config::Method.new enroll_data_sources_config
- unenroll_data_sources_config = parent_rpcs.unenroll_data_sources if parent_rpcs.respond_to? :unenroll_data_sources
- @unenroll_data_sources = ::Gapic::Config::Method.new unenroll_data_sources_config
-
- yield self if block_given?
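A short sketch of how the parent fallback in the initializer above resolves in practice; the timeout values are placeholders:

# Hypothetical override: one RPC gets a longer deadline than the rest.
client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::Client.new do |config|
  config.timeout = 10.0                          # inherited by every RPC
  config.rpcs.enroll_data_sources.timeout = 60.0 # a per-RPC setting wins where set
end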
- end
- end
- end
- end
- end
- end
- end
- end
- end
- end
-end
diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/data_transfer/v1/data_transfer_service/rest/service_stub.rb b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/data_transfer/v1/data_transfer_service/rest/service_stub.rb
deleted file mode 100644
index 26ea0d750773..000000000000
--- a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/data_transfer/v1/data_transfer_service/rest/service_stub.rb
+++ /dev/null
@@ -1,1133 +0,0 @@
-# frozen_string_literal: true
-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# https://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Auto-generated by gapic-generator-ruby. DO NOT EDIT!
-
-require "google/cloud/bigquery/datatransfer/v1/datatransfer_pb"
-
-module Google
- module Cloud
- module Bigquery
- module DataTransfer
- module V1
- module DataTransferService
- module Rest
- ##
- # REST service stub for the DataTransferService service.
- # Service stub contains baseline method implementations
- # including transcoding, making the REST call, and deserializing the response.
- #
- class ServiceStub
- def initialize endpoint:, endpoint_template:, universe_domain:, credentials:
- # These require statements are intentionally placed here to initialize
- # the REST modules only when it's required.
- require "gapic/rest"
-
- @client_stub = ::Gapic::Rest::ClientStub.new endpoint: endpoint,
- endpoint_template: endpoint_template,
- universe_domain: universe_domain,
- credentials: credentials,
- numeric_enums: true,
- raise_faraday_errors: false
- end
-
- ##
- # The effective universe domain
- #
- # @return [String]
- #
- def universe_domain
- @client_stub.universe_domain
- end
-
- ##
- # The effective endpoint
- #
- # @return [String]
- #
- def endpoint
- @client_stub.endpoint
- end
-
- ##
- # Baseline implementation for the get_data_source REST call
- #
- # @param request_pb [::Google::Cloud::Bigquery::DataTransfer::V1::GetDataSourceRequest]
- # A request object representing the call parameters. Required.
- # @param options [::Gapic::CallOptions]
- # Overrides the default settings for this call, e.g, timeout, retries etc. Optional.
- #
- # @yield [result, operation] Access the result along with the TransportOperation object
- # @yieldparam result [::Google::Cloud::Bigquery::DataTransfer::V1::DataSource]
- # @yieldparam operation [::Gapic::Rest::TransportOperation]
- #
- # @return [::Google::Cloud::Bigquery::DataTransfer::V1::DataSource]
- # A result object deserialized from the server's reply
- def get_data_source request_pb, options = nil
- raise ::ArgumentError, "request must be provided" if request_pb.nil?
-
- verb, uri, query_string_params, body = ServiceStub.transcode_get_data_source_request request_pb
- query_string_params = if query_string_params.any?
- query_string_params.to_h { |p| p.split "=", 2 } - else - {} - end - - response = @client_stub.make_http_request( - verb, - uri: uri, - body: body || "", - params: query_string_params, - options: options - ) - operation = ::Gapic::Rest::TransportOperation.new response - result = ::Google::Cloud::Bigquery::DataTransfer::V1::DataSource.decode_json response.body, ignore_unknown_fields: true - - yield result, operation if block_given? - result - end - - ## - # Baseline implementation for the list_data_sources REST call - # - # @param request_pb [::Google::Cloud::Bigquery::DataTransfer::V1::ListDataSourcesRequest] - # A request object representing the call parameters. Required. - # @param options [::Gapic::CallOptions] - # Overrides the default settings for this call, e.g, timeout, retries etc. Optional. - # - # @yield [result, operation] Access the result along with the TransportOperation object - # @yieldparam result [::Google::Cloud::Bigquery::DataTransfer::V1::ListDataSourcesResponse] - # @yieldparam operation [::Gapic::Rest::TransportOperation] - # - # @return [::Google::Cloud::Bigquery::DataTransfer::V1::ListDataSourcesResponse] - # A result object deserialized from the server's reply - def list_data_sources request_pb, options = nil - raise ::ArgumentError, "request must be provided" if request_pb.nil? - - verb, uri, query_string_params, body = ServiceStub.transcode_list_data_sources_request request_pb - query_string_params = if query_string_params.any? - query_string_params.to_h { |p| p.split "=", 2 } - else - {} - end - - response = @client_stub.make_http_request( - verb, - uri: uri, - body: body || "", - params: query_string_params, - options: options - ) - operation = ::Gapic::Rest::TransportOperation.new response - result = ::Google::Cloud::Bigquery::DataTransfer::V1::ListDataSourcesResponse.decode_json response.body, ignore_unknown_fields: true - - yield result, operation if block_given? - result - end - - ## - # Baseline implementation for the create_transfer_config REST call - # - # @param request_pb [::Google::Cloud::Bigquery::DataTransfer::V1::CreateTransferConfigRequest] - # A request object representing the call parameters. Required. - # @param options [::Gapic::CallOptions] - # Overrides the default settings for this call, e.g, timeout, retries etc. Optional. - # - # @yield [result, operation] Access the result along with the TransportOperation object - # @yieldparam result [::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig] - # @yieldparam operation [::Gapic::Rest::TransportOperation] - # - # @return [::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig] - # A result object deserialized from the server's reply - def create_transfer_config request_pb, options = nil - raise ::ArgumentError, "request must be provided" if request_pb.nil? - - verb, uri, query_string_params, body = ServiceStub.transcode_create_transfer_config_request request_pb - query_string_params = if query_string_params.any? - query_string_params.to_h { |p| p.split "=", 2 } - else - {} - end - - response = @client_stub.make_http_request( - verb, - uri: uri, - body: body || "", - params: query_string_params, - options: options - ) - operation = ::Gapic::Rest::TransportOperation.new response - result = ::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig.decode_json response.body, ignore_unknown_fields: true - - yield result, operation if block_given? 
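Each baseline call above yields the decoded message together with a Gapic::Rest::TransportOperation wrapping the raw HTTP response; a sketch of the same block form on the public client, with a placeholder resource name:

# Hypothetical call: inspect the result and the transport operation together.
client.get_data_source name: "projects/my-project/dataSources/scheduled_query" do |result, operation|
  puts result.display_name # decoded DataSource message
  # operation wraps the underlying HTTP response for low-level inspection
end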
- result - end - - ## - # Baseline implementation for the update_transfer_config REST call - # - # @param request_pb [::Google::Cloud::Bigquery::DataTransfer::V1::UpdateTransferConfigRequest] - # A request object representing the call parameters. Required. - # @param options [::Gapic::CallOptions] - # Overrides the default settings for this call, e.g, timeout, retries etc. Optional. - # - # @yield [result, operation] Access the result along with the TransportOperation object - # @yieldparam result [::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig] - # @yieldparam operation [::Gapic::Rest::TransportOperation] - # - # @return [::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig] - # A result object deserialized from the server's reply - def update_transfer_config request_pb, options = nil - raise ::ArgumentError, "request must be provided" if request_pb.nil? - - verb, uri, query_string_params, body = ServiceStub.transcode_update_transfer_config_request request_pb - query_string_params = if query_string_params.any? - query_string_params.to_h { |p| p.split "=", 2 } - else - {} - end - - response = @client_stub.make_http_request( - verb, - uri: uri, - body: body || "", - params: query_string_params, - options: options - ) - operation = ::Gapic::Rest::TransportOperation.new response - result = ::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig.decode_json response.body, ignore_unknown_fields: true - - yield result, operation if block_given? - result - end - - ## - # Baseline implementation for the delete_transfer_config REST call - # - # @param request_pb [::Google::Cloud::Bigquery::DataTransfer::V1::DeleteTransferConfigRequest] - # A request object representing the call parameters. Required. - # @param options [::Gapic::CallOptions] - # Overrides the default settings for this call, e.g, timeout, retries etc. Optional. - # - # @yield [result, operation] Access the result along with the TransportOperation object - # @yieldparam result [::Google::Protobuf::Empty] - # @yieldparam operation [::Gapic::Rest::TransportOperation] - # - # @return [::Google::Protobuf::Empty] - # A result object deserialized from the server's reply - def delete_transfer_config request_pb, options = nil - raise ::ArgumentError, "request must be provided" if request_pb.nil? - - verb, uri, query_string_params, body = ServiceStub.transcode_delete_transfer_config_request request_pb - query_string_params = if query_string_params.any? - query_string_params.to_h { |p| p.split "=", 2 } - else - {} - end - - response = @client_stub.make_http_request( - verb, - uri: uri, - body: body || "", - params: query_string_params, - options: options - ) - operation = ::Gapic::Rest::TransportOperation.new response - result = ::Google::Protobuf::Empty.decode_json response.body, ignore_unknown_fields: true - - yield result, operation if block_given? - result - end - - ## - # Baseline implementation for the get_transfer_config REST call - # - # @param request_pb [::Google::Cloud::Bigquery::DataTransfer::V1::GetTransferConfigRequest] - # A request object representing the call parameters. Required. - # @param options [::Gapic::CallOptions] - # Overrides the default settings for this call, e.g, timeout, retries etc. Optional. 
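The update path above sends a PATCH that rewrites only the fields named in the mask; a sketch against the public client, with a placeholder config name and display name:

# Hypothetical partial update: change only display_name, leave other fields untouched.
config = Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig.new(
  name: "projects/my-project/transferConfigs/1234",
  display_name: "nightly load"
)
mask = Google::Protobuf::FieldMask.new paths: ["display_name"]
result = client.update_transfer_config transfer_config: config, update_mask: mask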
- # - # @yield [result, operation] Access the result along with the TransportOperation object - # @yieldparam result [::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig] - # @yieldparam operation [::Gapic::Rest::TransportOperation] - # - # @return [::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig] - # A result object deserialized from the server's reply - def get_transfer_config request_pb, options = nil - raise ::ArgumentError, "request must be provided" if request_pb.nil? - - verb, uri, query_string_params, body = ServiceStub.transcode_get_transfer_config_request request_pb - query_string_params = if query_string_params.any? - query_string_params.to_h { |p| p.split "=", 2 } - else - {} - end - - response = @client_stub.make_http_request( - verb, - uri: uri, - body: body || "", - params: query_string_params, - options: options - ) - operation = ::Gapic::Rest::TransportOperation.new response - result = ::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig.decode_json response.body, ignore_unknown_fields: true - - yield result, operation if block_given? - result - end - - ## - # Baseline implementation for the list_transfer_configs REST call - # - # @param request_pb [::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferConfigsRequest] - # A request object representing the call parameters. Required. - # @param options [::Gapic::CallOptions] - # Overrides the default settings for this call, e.g, timeout, retries etc. Optional. - # - # @yield [result, operation] Access the result along with the TransportOperation object - # @yieldparam result [::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferConfigsResponse] - # @yieldparam operation [::Gapic::Rest::TransportOperation] - # - # @return [::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferConfigsResponse] - # A result object deserialized from the server's reply - def list_transfer_configs request_pb, options = nil - raise ::ArgumentError, "request must be provided" if request_pb.nil? - - verb, uri, query_string_params, body = ServiceStub.transcode_list_transfer_configs_request request_pb - query_string_params = if query_string_params.any? - query_string_params.to_h { |p| p.split "=", 2 } - else - {} - end - - response = @client_stub.make_http_request( - verb, - uri: uri, - body: body || "", - params: query_string_params, - options: options - ) - operation = ::Gapic::Rest::TransportOperation.new response - result = ::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferConfigsResponse.decode_json response.body, ignore_unknown_fields: true - - yield result, operation if block_given? - result - end - - ## - # Baseline implementation for the schedule_transfer_runs REST call - # - # @param request_pb [::Google::Cloud::Bigquery::DataTransfer::V1::ScheduleTransferRunsRequest] - # A request object representing the call parameters. Required. - # @param options [::Gapic::CallOptions] - # Overrides the default settings for this call, e.g, timeout, retries etc. Optional. - # - # @yield [result, operation] Access the result along with the TransportOperation object - # @yieldparam result [::Google::Cloud::Bigquery::DataTransfer::V1::ScheduleTransferRunsResponse] - # @yieldparam operation [::Gapic::Rest::TransportOperation] - # - # @return [::Google::Cloud::Bigquery::DataTransfer::V1::ScheduleTransferRunsResponse] - # A result object deserialized from the server's reply - def schedule_transfer_runs request_pb, options = nil - raise ::ArgumentError, "request must be provided" if request_pb.nil? 
- - verb, uri, query_string_params, body = ServiceStub.transcode_schedule_transfer_runs_request request_pb - query_string_params = if query_string_params.any? - query_string_params.to_h { |p| p.split "=", 2 } - else - {} - end - - response = @client_stub.make_http_request( - verb, - uri: uri, - body: body || "", - params: query_string_params, - options: options - ) - operation = ::Gapic::Rest::TransportOperation.new response - result = ::Google::Cloud::Bigquery::DataTransfer::V1::ScheduleTransferRunsResponse.decode_json response.body, ignore_unknown_fields: true - - yield result, operation if block_given? - result - end - - ## - # Baseline implementation for the start_manual_transfer_runs REST call - # - # @param request_pb [::Google::Cloud::Bigquery::DataTransfer::V1::StartManualTransferRunsRequest] - # A request object representing the call parameters. Required. - # @param options [::Gapic::CallOptions] - # Overrides the default settings for this call, e.g, timeout, retries etc. Optional. - # - # @yield [result, operation] Access the result along with the TransportOperation object - # @yieldparam result [::Google::Cloud::Bigquery::DataTransfer::V1::StartManualTransferRunsResponse] - # @yieldparam operation [::Gapic::Rest::TransportOperation] - # - # @return [::Google::Cloud::Bigquery::DataTransfer::V1::StartManualTransferRunsResponse] - # A result object deserialized from the server's reply - def start_manual_transfer_runs request_pb, options = nil - raise ::ArgumentError, "request must be provided" if request_pb.nil? - - verb, uri, query_string_params, body = ServiceStub.transcode_start_manual_transfer_runs_request request_pb - query_string_params = if query_string_params.any? - query_string_params.to_h { |p| p.split "=", 2 } - else - {} - end - - response = @client_stub.make_http_request( - verb, - uri: uri, - body: body || "", - params: query_string_params, - options: options - ) - operation = ::Gapic::Rest::TransportOperation.new response - result = ::Google::Cloud::Bigquery::DataTransfer::V1::StartManualTransferRunsResponse.decode_json response.body, ignore_unknown_fields: true - - yield result, operation if block_given? - result - end - - ## - # Baseline implementation for the get_transfer_run REST call - # - # @param request_pb [::Google::Cloud::Bigquery::DataTransfer::V1::GetTransferRunRequest] - # A request object representing the call parameters. Required. - # @param options [::Gapic::CallOptions] - # Overrides the default settings for this call, e.g, timeout, retries etc. Optional. - # - # @yield [result, operation] Access the result along with the TransportOperation object - # @yieldparam result [::Google::Cloud::Bigquery::DataTransfer::V1::TransferRun] - # @yieldparam operation [::Gapic::Rest::TransportOperation] - # - # @return [::Google::Cloud::Bigquery::DataTransfer::V1::TransferRun] - # A result object deserialized from the server's reply - def get_transfer_run request_pb, options = nil - raise ::ArgumentError, "request must be provided" if request_pb.nil? - - verb, uri, query_string_params, body = ServiceStub.transcode_get_transfer_run_request request_pb - query_string_params = if query_string_params.any? 
- query_string_params.to_h { |p| p.split "=", 2 } - else - {} - end - - response = @client_stub.make_http_request( - verb, - uri: uri, - body: body || "", - params: query_string_params, - options: options - ) - operation = ::Gapic::Rest::TransportOperation.new response - result = ::Google::Cloud::Bigquery::DataTransfer::V1::TransferRun.decode_json response.body, ignore_unknown_fields: true - - yield result, operation if block_given? - result - end - - ## - # Baseline implementation for the delete_transfer_run REST call - # - # @param request_pb [::Google::Cloud::Bigquery::DataTransfer::V1::DeleteTransferRunRequest] - # A request object representing the call parameters. Required. - # @param options [::Gapic::CallOptions] - # Overrides the default settings for this call, e.g, timeout, retries etc. Optional. - # - # @yield [result, operation] Access the result along with the TransportOperation object - # @yieldparam result [::Google::Protobuf::Empty] - # @yieldparam operation [::Gapic::Rest::TransportOperation] - # - # @return [::Google::Protobuf::Empty] - # A result object deserialized from the server's reply - def delete_transfer_run request_pb, options = nil - raise ::ArgumentError, "request must be provided" if request_pb.nil? - - verb, uri, query_string_params, body = ServiceStub.transcode_delete_transfer_run_request request_pb - query_string_params = if query_string_params.any? - query_string_params.to_h { |p| p.split "=", 2 } - else - {} - end - - response = @client_stub.make_http_request( - verb, - uri: uri, - body: body || "", - params: query_string_params, - options: options - ) - operation = ::Gapic::Rest::TransportOperation.new response - result = ::Google::Protobuf::Empty.decode_json response.body, ignore_unknown_fields: true - - yield result, operation if block_given? - result - end - - ## - # Baseline implementation for the list_transfer_runs REST call - # - # @param request_pb [::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferRunsRequest] - # A request object representing the call parameters. Required. - # @param options [::Gapic::CallOptions] - # Overrides the default settings for this call, e.g, timeout, retries etc. Optional. - # - # @yield [result, operation] Access the result along with the TransportOperation object - # @yieldparam result [::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferRunsResponse] - # @yieldparam operation [::Gapic::Rest::TransportOperation] - # - # @return [::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferRunsResponse] - # A result object deserialized from the server's reply - def list_transfer_runs request_pb, options = nil - raise ::ArgumentError, "request must be provided" if request_pb.nil? - - verb, uri, query_string_params, body = ServiceStub.transcode_list_transfer_runs_request request_pb - query_string_params = if query_string_params.any? - query_string_params.to_h { |p| p.split "=", 2 } - else - {} - end - - response = @client_stub.make_http_request( - verb, - uri: uri, - body: body || "", - params: query_string_params, - options: options - ) - operation = ::Gapic::Rest::TransportOperation.new response - result = ::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferRunsResponse.decode_json response.body, ignore_unknown_fields: true - - yield result, operation if block_given? - result - end - - ## - # Baseline implementation for the list_transfer_logs REST call - # - # @param request_pb [::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferLogsRequest] - # A request object representing the call parameters. 
Required. - # @param options [::Gapic::CallOptions] - # Overrides the default settings for this call, e.g, timeout, retries etc. Optional. - # - # @yield [result, operation] Access the result along with the TransportOperation object - # @yieldparam result [::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferLogsResponse] - # @yieldparam operation [::Gapic::Rest::TransportOperation] - # - # @return [::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferLogsResponse] - # A result object deserialized from the server's reply - def list_transfer_logs request_pb, options = nil - raise ::ArgumentError, "request must be provided" if request_pb.nil? - - verb, uri, query_string_params, body = ServiceStub.transcode_list_transfer_logs_request request_pb - query_string_params = if query_string_params.any? - query_string_params.to_h { |p| p.split "=", 2 } - else - {} - end - - response = @client_stub.make_http_request( - verb, - uri: uri, - body: body || "", - params: query_string_params, - options: options - ) - operation = ::Gapic::Rest::TransportOperation.new response - result = ::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferLogsResponse.decode_json response.body, ignore_unknown_fields: true - - yield result, operation if block_given? - result - end - - ## - # Baseline implementation for the check_valid_creds REST call - # - # @param request_pb [::Google::Cloud::Bigquery::DataTransfer::V1::CheckValidCredsRequest] - # A request object representing the call parameters. Required. - # @param options [::Gapic::CallOptions] - # Overrides the default settings for this call, e.g, timeout, retries etc. Optional. - # - # @yield [result, operation] Access the result along with the TransportOperation object - # @yieldparam result [::Google::Cloud::Bigquery::DataTransfer::V1::CheckValidCredsResponse] - # @yieldparam operation [::Gapic::Rest::TransportOperation] - # - # @return [::Google::Cloud::Bigquery::DataTransfer::V1::CheckValidCredsResponse] - # A result object deserialized from the server's reply - def check_valid_creds request_pb, options = nil - raise ::ArgumentError, "request must be provided" if request_pb.nil? - - verb, uri, query_string_params, body = ServiceStub.transcode_check_valid_creds_request request_pb - query_string_params = if query_string_params.any? - query_string_params.to_h { |p| p.split "=", 2 } - else - {} - end - - response = @client_stub.make_http_request( - verb, - uri: uri, - body: body || "", - params: query_string_params, - options: options - ) - operation = ::Gapic::Rest::TransportOperation.new response - result = ::Google::Cloud::Bigquery::DataTransfer::V1::CheckValidCredsResponse.decode_json response.body, ignore_unknown_fields: true - - yield result, operation if block_given? - result - end - - ## - # Baseline implementation for the enroll_data_sources REST call - # - # @param request_pb [::Google::Cloud::Bigquery::DataTransfer::V1::EnrollDataSourcesRequest] - # A request object representing the call parameters. Required. - # @param options [::Gapic::CallOptions] - # Overrides the default settings for this call, e.g, timeout, retries etc. Optional. 
- # - # @yield [result, operation] Access the result along with the TransportOperation object - # @yieldparam result [::Google::Protobuf::Empty] - # @yieldparam operation [::Gapic::Rest::TransportOperation] - # - # @return [::Google::Protobuf::Empty] - # A result object deserialized from the server's reply - def enroll_data_sources request_pb, options = nil - raise ::ArgumentError, "request must be provided" if request_pb.nil? - - verb, uri, query_string_params, body = ServiceStub.transcode_enroll_data_sources_request request_pb - query_string_params = if query_string_params.any? - query_string_params.to_h { |p| p.split "=", 2 } - else - {} - end - - response = @client_stub.make_http_request( - verb, - uri: uri, - body: body || "", - params: query_string_params, - options: options - ) - operation = ::Gapic::Rest::TransportOperation.new response - result = ::Google::Protobuf::Empty.decode_json response.body, ignore_unknown_fields: true - - yield result, operation if block_given? - result - end - - ## - # Baseline implementation for the unenroll_data_sources REST call - # - # @param request_pb [::Google::Cloud::Bigquery::DataTransfer::V1::UnenrollDataSourcesRequest] - # A request object representing the call parameters. Required. - # @param options [::Gapic::CallOptions] - # Overrides the default settings for this call, e.g, timeout, retries etc. Optional. - # - # @yield [result, operation] Access the result along with the TransportOperation object - # @yieldparam result [::Google::Protobuf::Empty] - # @yieldparam operation [::Gapic::Rest::TransportOperation] - # - # @return [::Google::Protobuf::Empty] - # A result object deserialized from the server's reply - def unenroll_data_sources request_pb, options = nil - raise ::ArgumentError, "request must be provided" if request_pb.nil? - - verb, uri, query_string_params, body = ServiceStub.transcode_unenroll_data_sources_request request_pb - query_string_params = if query_string_params.any? - query_string_params.to_h { |p| p.split "=", 2 } - else - {} - end - - response = @client_stub.make_http_request( - verb, - uri: uri, - body: body || "", - params: query_string_params, - options: options - ) - operation = ::Gapic::Rest::TransportOperation.new response - result = ::Google::Protobuf::Empty.decode_json response.body, ignore_unknown_fields: true - - yield result, operation if block_given? - result - end - - ## - # @private - # - # GRPC transcoding helper method for the get_data_source REST call - # - # @param request_pb [::Google::Cloud::Bigquery::DataTransfer::V1::GetDataSourceRequest] - # A request object representing the call parameters. Required. - # @return [Array(String, [String, nil], Hash{String => String})] - # Uri, Body, Query string parameters - def self.transcode_get_data_source_request request_pb - transcoder = Gapic::Rest::GrpcTranscoder.new - .with_bindings( - uri_method: :get, - uri_template: "/v1/{name}", - matches: [ - ["name", %r{^projects/[^/]+/locations/[^/]+/dataSources/[^/]+/?$}, false] - ] - ) - .with_bindings( - uri_method: :get, - uri_template: "/v1/{name}", - matches: [ - ["name", %r{^projects/[^/]+/dataSources/[^/]+/?$}, false] - ] - ) - transcoder.transcode request_pb - end - - ## - # @private - # - # GRPC transcoding helper method for the list_data_sources REST call - # - # @param request_pb [::Google::Cloud::Bigquery::DataTransfer::V1::ListDataSourcesRequest] - # A request object representing the call parameters. Required. 
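To see the binding selection above in action, a sketch of transcoding a location-qualified resource name, which matches the first binding's pattern; the name is a placeholder and the comments abbreviate the returned tuple:

# Hypothetical transcoding: a GET binding expands the name into the URI path.
request = Google::Cloud::Bigquery::DataTransfer::V1::GetDataSourceRequest.new(
  name: "projects/my-project/locations/us/dataSources/scheduled_query"
)
verb, uri, query, body = ServiceStub.transcode_get_data_source_request request
# verb  => :get
# uri   => "/v1/projects/my-project/locations/us/dataSources/scheduled_query"
# body is nil for this GET binding; query carries any remaining request fields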
- # @return [Array(String, [String, nil], Hash{String => String})] - # Uri, Body, Query string parameters - def self.transcode_list_data_sources_request request_pb - transcoder = Gapic::Rest::GrpcTranscoder.new - .with_bindings( - uri_method: :get, - uri_template: "/v1/{parent}/dataSources", - matches: [ - ["parent", %r{^projects/[^/]+/locations/[^/]+/?$}, false] - ] - ) - .with_bindings( - uri_method: :get, - uri_template: "/v1/{parent}/dataSources", - matches: [ - ["parent", %r{^projects/[^/]+/?$}, false] - ] - ) - transcoder.transcode request_pb - end - - ## - # @private - # - # GRPC transcoding helper method for the create_transfer_config REST call - # - # @param request_pb [::Google::Cloud::Bigquery::DataTransfer::V1::CreateTransferConfigRequest] - # A request object representing the call parameters. Required. - # @return [Array(String, [String, nil], Hash{String => String})] - # Uri, Body, Query string parameters - def self.transcode_create_transfer_config_request request_pb - transcoder = Gapic::Rest::GrpcTranscoder.new - .with_bindings( - uri_method: :post, - uri_template: "/v1/{parent}/transferConfigs", - body: "transfer_config", - matches: [ - ["parent", %r{^projects/[^/]+/locations/[^/]+/?$}, false] - ] - ) - .with_bindings( - uri_method: :post, - uri_template: "/v1/{parent}/transferConfigs", - body: "transfer_config", - matches: [ - ["parent", %r{^projects/[^/]+/?$}, false] - ] - ) - transcoder.transcode request_pb - end - - ## - # @private - # - # GRPC transcoding helper method for the update_transfer_config REST call - # - # @param request_pb [::Google::Cloud::Bigquery::DataTransfer::V1::UpdateTransferConfigRequest] - # A request object representing the call parameters. Required. - # @return [Array(String, [String, nil], Hash{String => String})] - # Uri, Body, Query string parameters - def self.transcode_update_transfer_config_request request_pb - transcoder = Gapic::Rest::GrpcTranscoder.new - .with_bindings( - uri_method: :patch, - uri_template: "/v1/{transfer_config.name}", - body: "transfer_config", - matches: [ - ["transfer_config.name", %r{^projects/[^/]+/locations/[^/]+/transferConfigs/[^/]+/?$}, false] - ] - ) - .with_bindings( - uri_method: :patch, - uri_template: "/v1/{transfer_config.name}", - body: "transfer_config", - matches: [ - ["transfer_config.name", %r{^projects/[^/]+/transferConfigs/[^/]+/?$}, false] - ] - ) - transcoder.transcode request_pb - end - - ## - # @private - # - # GRPC transcoding helper method for the delete_transfer_config REST call - # - # @param request_pb [::Google::Cloud::Bigquery::DataTransfer::V1::DeleteTransferConfigRequest] - # A request object representing the call parameters. Required. - # @return [Array(String, [String, nil], Hash{String => String})] - # Uri, Body, Query string parameters - def self.transcode_delete_transfer_config_request request_pb - transcoder = Gapic::Rest::GrpcTranscoder.new - .with_bindings( - uri_method: :delete, - uri_template: "/v1/{name}", - matches: [ - ["name", %r{^projects/[^/]+/locations/[^/]+/transferConfigs/[^/]+/?$}, false] - ] - ) - .with_bindings( - uri_method: :delete, - uri_template: "/v1/{name}", - matches: [ - ["name", %r{^projects/[^/]+/transferConfigs/[^/]+/?$}, false] - ] - ) - transcoder.transcode request_pb - end - - ## - # @private - # - # GRPC transcoding helper method for the get_transfer_config REST call - # - # @param request_pb [::Google::Cloud::Bigquery::DataTransfer::V1::GetTransferConfigRequest] - # A request object representing the call parameters. Required. 
- # @return [Array(String, [String, nil], Hash{String => String})] - # Uri, Body, Query string parameters - def self.transcode_get_transfer_config_request request_pb - transcoder = Gapic::Rest::GrpcTranscoder.new - .with_bindings( - uri_method: :get, - uri_template: "/v1/{name}", - matches: [ - ["name", %r{^projects/[^/]+/locations/[^/]+/transferConfigs/[^/]+/?$}, false] - ] - ) - .with_bindings( - uri_method: :get, - uri_template: "/v1/{name}", - matches: [ - ["name", %r{^projects/[^/]+/transferConfigs/[^/]+/?$}, false] - ] - ) - transcoder.transcode request_pb - end - - ## - # @private - # - # GRPC transcoding helper method for the list_transfer_configs REST call - # - # @param request_pb [::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferConfigsRequest] - # A request object representing the call parameters. Required. - # @return [Array(String, [String, nil], Hash{String => String})] - # Uri, Body, Query string parameters - def self.transcode_list_transfer_configs_request request_pb - transcoder = Gapic::Rest::GrpcTranscoder.new - .with_bindings( - uri_method: :get, - uri_template: "/v1/{parent}/transferConfigs", - matches: [ - ["parent", %r{^projects/[^/]+/locations/[^/]+/?$}, false] - ] - ) - .with_bindings( - uri_method: :get, - uri_template: "/v1/{parent}/transferConfigs", - matches: [ - ["parent", %r{^projects/[^/]+/?$}, false] - ] - ) - transcoder.transcode request_pb - end - - ## - # @private - # - # GRPC transcoding helper method for the schedule_transfer_runs REST call - # - # @param request_pb [::Google::Cloud::Bigquery::DataTransfer::V1::ScheduleTransferRunsRequest] - # A request object representing the call parameters. Required. - # @return [Array(String, [String, nil], Hash{String => String})] - # Uri, Body, Query string parameters - def self.transcode_schedule_transfer_runs_request request_pb - transcoder = Gapic::Rest::GrpcTranscoder.new - .with_bindings( - uri_method: :post, - uri_template: "/v1/{parent}:scheduleRuns", - body: "*", - matches: [ - ["parent", %r{^projects/[^/]+/locations/[^/]+/transferConfigs/[^/]+/?$}, false] - ] - ) - .with_bindings( - uri_method: :post, - uri_template: "/v1/{parent}:scheduleRuns", - body: "*", - matches: [ - ["parent", %r{^projects/[^/]+/transferConfigs/[^/]+/?$}, false] - ] - ) - transcoder.transcode request_pb - end - - ## - # @private - # - # GRPC transcoding helper method for the start_manual_transfer_runs REST call - # - # @param request_pb [::Google::Cloud::Bigquery::DataTransfer::V1::StartManualTransferRunsRequest] - # A request object representing the call parameters. Required. - # @return [Array(String, [String, nil], Hash{String => String})] - # Uri, Body, Query string parameters - def self.transcode_start_manual_transfer_runs_request request_pb - transcoder = Gapic::Rest::GrpcTranscoder.new - .with_bindings( - uri_method: :post, - uri_template: "/v1/{parent}:startManualRuns", - body: "*", - matches: [ - ["parent", %r{^projects/[^/]+/locations/[^/]+/transferConfigs/[^/]+/?$}, false] - ] - ) - .with_bindings( - uri_method: :post, - uri_template: "/v1/{parent}:startManualRuns", - body: "*", - matches: [ - ["parent", %r{^projects/[^/]+/transferConfigs/[^/]+/?$}, false] - ] - ) - transcoder.transcode request_pb - end - - ## - # @private - # - # GRPC transcoding helper method for the get_transfer_run REST call - # - # @param request_pb [::Google::Cloud::Bigquery::DataTransfer::V1::GetTransferRunRequest] - # A request object representing the call parameters. Required. 
- # @return [Array(String, [String, nil], Hash{String => String})] - # Uri, Body, Query string parameters - def self.transcode_get_transfer_run_request request_pb - transcoder = Gapic::Rest::GrpcTranscoder.new - .with_bindings( - uri_method: :get, - uri_template: "/v1/{name}", - matches: [ - ["name", %r{^projects/[^/]+/locations/[^/]+/transferConfigs/[^/]+/runs/[^/]+/?$}, false] - ] - ) - .with_bindings( - uri_method: :get, - uri_template: "/v1/{name}", - matches: [ - ["name", %r{^projects/[^/]+/transferConfigs/[^/]+/runs/[^/]+/?$}, false] - ] - ) - transcoder.transcode request_pb - end - - ## - # @private - # - # GRPC transcoding helper method for the delete_transfer_run REST call - # - # @param request_pb [::Google::Cloud::Bigquery::DataTransfer::V1::DeleteTransferRunRequest] - # A request object representing the call parameters. Required. - # @return [Array(String, [String, nil], Hash{String => String})] - # Uri, Body, Query string parameters - def self.transcode_delete_transfer_run_request request_pb - transcoder = Gapic::Rest::GrpcTranscoder.new - .with_bindings( - uri_method: :delete, - uri_template: "/v1/{name}", - matches: [ - ["name", %r{^projects/[^/]+/locations/[^/]+/transferConfigs/[^/]+/runs/[^/]+/?$}, false] - ] - ) - .with_bindings( - uri_method: :delete, - uri_template: "/v1/{name}", - matches: [ - ["name", %r{^projects/[^/]+/transferConfigs/[^/]+/runs/[^/]+/?$}, false] - ] - ) - transcoder.transcode request_pb - end - - ## - # @private - # - # GRPC transcoding helper method for the list_transfer_runs REST call - # - # @param request_pb [::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferRunsRequest] - # A request object representing the call parameters. Required. - # @return [Array(String, [String, nil], Hash{String => String})] - # Uri, Body, Query string parameters - def self.transcode_list_transfer_runs_request request_pb - transcoder = Gapic::Rest::GrpcTranscoder.new - .with_bindings( - uri_method: :get, - uri_template: "/v1/{parent}/runs", - matches: [ - ["parent", %r{^projects/[^/]+/locations/[^/]+/transferConfigs/[^/]+/?$}, false] - ] - ) - .with_bindings( - uri_method: :get, - uri_template: "/v1/{parent}/runs", - matches: [ - ["parent", %r{^projects/[^/]+/transferConfigs/[^/]+/?$}, false] - ] - ) - transcoder.transcode request_pb - end - - ## - # @private - # - # GRPC transcoding helper method for the list_transfer_logs REST call - # - # @param request_pb [::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferLogsRequest] - # A request object representing the call parameters. Required. - # @return [Array(String, [String, nil], Hash{String => String})] - # Uri, Body, Query string parameters - def self.transcode_list_transfer_logs_request request_pb - transcoder = Gapic::Rest::GrpcTranscoder.new - .with_bindings( - uri_method: :get, - uri_template: "/v1/{parent}/transferLogs", - matches: [ - ["parent", %r{^projects/[^/]+/locations/[^/]+/transferConfigs/[^/]+/runs/[^/]+/?$}, false] - ] - ) - .with_bindings( - uri_method: :get, - uri_template: "/v1/{parent}/transferLogs", - matches: [ - ["parent", %r{^projects/[^/]+/transferConfigs/[^/]+/runs/[^/]+/?$}, false] - ] - ) - transcoder.transcode request_pb - end - - ## - # @private - # - # GRPC transcoding helper method for the check_valid_creds REST call - # - # @param request_pb [::Google::Cloud::Bigquery::DataTransfer::V1::CheckValidCredsRequest] - # A request object representing the call parameters. Required. 
- # @return [Array(String, [String, nil], Hash{String => String})] - # Uri, Body, Query string parameters - def self.transcode_check_valid_creds_request request_pb - transcoder = Gapic::Rest::GrpcTranscoder.new - .with_bindings( - uri_method: :post, - uri_template: "/v1/{name}:checkValidCreds", - body: "*", - matches: [ - ["name", %r{^projects/[^/]+/locations/[^/]+/dataSources/[^/]+/?$}, false] - ] - ) - .with_bindings( - uri_method: :post, - uri_template: "/v1/{name}:checkValidCreds", - body: "*", - matches: [ - ["name", %r{^projects/[^/]+/dataSources/[^/]+/?$}, false] - ] - ) - transcoder.transcode request_pb - end - - ## - # @private - # - # GRPC transcoding helper method for the enroll_data_sources REST call - # - # @param request_pb [::Google::Cloud::Bigquery::DataTransfer::V1::EnrollDataSourcesRequest] - # A request object representing the call parameters. Required. - # @return [Array(String, [String, nil], Hash{String => String})] - # Uri, Body, Query string parameters - def self.transcode_enroll_data_sources_request request_pb - transcoder = Gapic::Rest::GrpcTranscoder.new - .with_bindings( - uri_method: :post, - uri_template: "/v1/{name}:enrollDataSources", - body: "*", - matches: [ - ["name", %r{^projects/[^/]+/locations/[^/]+/?$}, false] - ] - ) - .with_bindings( - uri_method: :post, - uri_template: "/v1/{name}:enrollDataSources", - body: "*", - matches: [ - ["name", %r{^projects/[^/]+/?$}, false] - ] - ) - transcoder.transcode request_pb - end - - ## - # @private - # - # GRPC transcoding helper method for the unenroll_data_sources REST call - # - # @param request_pb [::Google::Cloud::Bigquery::DataTransfer::V1::UnenrollDataSourcesRequest] - # A request object representing the call parameters. Required. - # @return [Array(String, [String, nil], Hash{String => String})] - # Uri, Body, Query string parameters - def self.transcode_unenroll_data_sources_request request_pb - transcoder = Gapic::Rest::GrpcTranscoder.new - .with_bindings( - uri_method: :post, - uri_template: "/v1/{name}:unenrollDataSources", - body: "*", - matches: [ - ["name", %r{^projects/[^/]+/locations/[^/]+/?$}, false] - ] - ) - transcoder.transcode request_pb - end - end - end - end - end - end - end - end -end diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/data_transfer/v1/rest.rb b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/data_transfer/v1/rest.rb deleted file mode 100644 index 57c28f8adc68..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/data_transfer/v1/rest.rb +++ /dev/null @@ -1,40 +0,0 @@ -# frozen_string_literal: true - -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Auto-generated by gapic-generator-ruby. DO NOT EDIT! 
- -require "google/cloud/bigquery/data_transfer/v1/data_transfer_service/rest" -require "google/cloud/bigquery/data_transfer/v1/bindings_override" -require "google/cloud/bigquery/data_transfer/v1/version" - -module Google - module Cloud - module Bigquery - module DataTransfer - ## - # To load just the REST part of this package, including all its services, and instantiate a REST client: - # - # @example - # - # require "google/cloud/bigquery/data_transfer/v1/rest" - # client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::Client.new - # - module V1 - end - end - end - end -end diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/data_transfer/v1/version.rb b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/data_transfer/v1/version.rb deleted file mode 100644 index c65948a1833c..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/data_transfer/v1/version.rb +++ /dev/null @@ -1,30 +0,0 @@ -# frozen_string_literal: true - -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Auto-generated by gapic-generator-ruby. DO NOT EDIT! - - -module Google - module Cloud - module Bigquery - module DataTransfer - module V1 - VERSION = "0.0.1" - end - end - end - end -end diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/datatransfer/v1/datatransfer_pb.rb b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/datatransfer/v1/datatransfer_pb.rb deleted file mode 100644 index c01dc686f687..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/datatransfer/v1/datatransfer_pb.rb +++ /dev/null @@ -1,89 +0,0 @@ -# frozen_string_literal: true -# Generated by the protocol buffer compiler. DO NOT EDIT! 
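The version file above pins the staging copy at 0.0.1; a one-line sketch of reading the constant back at runtime:

require "google/cloud/bigquery/data_transfer/v1/version"

puts Google::Cloud::Bigquery::DataTransfer::V1::VERSION # => "0.0.1" in this staging snapshot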
-# source: google/cloud/bigquery/datatransfer/v1/datatransfer.proto - -require 'google/protobuf' - -require 'google/api/annotations_pb' -require 'google/api/client_pb' -require 'google/api/field_behavior_pb' -require 'google/api/resource_pb' -require 'google/cloud/bigquery/datatransfer/v1/transfer_pb' -require 'google/protobuf/duration_pb' -require 'google/protobuf/empty_pb' -require 'google/protobuf/field_mask_pb' -require 'google/protobuf/timestamp_pb' -require 'google/protobuf/wrappers_pb' - - -descriptor_data = "\n8google/cloud/bigquery/datatransfer/v1/datatransfer.proto\x12%google.cloud.bigquery.datatransfer.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a\x34google/cloud/bigquery/datatransfer/v1/transfer.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/wrappers.proto\"\x8f\x05\n\x13\x44\x61taSourceParameter\x12\x10\n\x08param_id\x18\x01 \x01(\t\x12\x14\n\x0c\x64isplay_name\x18\x02 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x03 \x01(\t\x12M\n\x04type\x18\x04 \x01(\x0e\x32?.google.cloud.bigquery.datatransfer.v1.DataSourceParameter.Type\x12\x10\n\x08required\x18\x05 \x01(\x08\x12\x10\n\x08repeated\x18\x06 \x01(\x08\x12\x18\n\x10validation_regex\x18\x07 \x01(\t\x12\x16\n\x0e\x61llowed_values\x18\x08 \x03(\t\x12/\n\tmin_value\x18\t \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12/\n\tmax_value\x18\n \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12J\n\x06\x66ields\x18\x0b \x03(\x0b\x32:.google.cloud.bigquery.datatransfer.v1.DataSourceParameter\x12\x1e\n\x16validation_description\x18\x0c \x01(\t\x12\x1b\n\x13validation_help_url\x18\r \x01(\t\x12\x11\n\timmutable\x18\x0e \x01(\x08\x12\x0f\n\x07recurse\x18\x0f \x01(\x08\x12\x12\n\ndeprecated\x18\x14 \x01(\x08\"s\n\x04Type\x12\x14\n\x10TYPE_UNSPECIFIED\x10\x00\x12\n\n\x06STRING\x10\x01\x12\x0b\n\x07INTEGER\x10\x02\x12\n\n\x06\x44OUBLE\x10\x03\x12\x0b\n\x07\x42OOLEAN\x10\x04\x12\n\n\x06RECORD\x10\x05\x12\r\n\tPLUS_PAGE\x10\x06\x12\x08\n\x04LIST\x10\x07\"\x9c\t\n\nDataSource\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x03\x12\x16\n\x0e\x64\x61ta_source_id\x18\x02 \x01(\t\x12\x14\n\x0c\x64isplay_name\x18\x03 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x04 \x01(\t\x12\x11\n\tclient_id\x18\x05 \x01(\t\x12\x0e\n\x06scopes\x18\x06 \x03(\t\x12N\n\rtransfer_type\x18\x07 \x01(\x0e\x32\x33.google.cloud.bigquery.datatransfer.v1.TransferTypeB\x02\x18\x01\x12\'\n\x1bsupports_multiple_transfers\x18\x08 \x01(\x08\x42\x02\x18\x01\x12\x1f\n\x17update_deadline_seconds\x18\t \x01(\x05\x12\x18\n\x10\x64\x65\x66\x61ult_schedule\x18\n \x01(\t\x12 \n\x18supports_custom_schedule\x18\x0b \x01(\x08\x12N\n\nparameters\x18\x0c \x03(\x0b\x32:.google.cloud.bigquery.datatransfer.v1.DataSourceParameter\x12\x10\n\x08help_url\x18\r \x01(\t\x12_\n\x12\x61uthorization_type\x18\x0e \x01(\x0e\x32\x43.google.cloud.bigquery.datatransfer.v1.DataSource.AuthorizationType\x12\\\n\x11\x64\x61ta_refresh_type\x18\x0f \x01(\x0e\x32\x41.google.cloud.bigquery.datatransfer.v1.DataSource.DataRefreshType\x12(\n default_data_refresh_window_days\x18\x10 \x01(\x05\x12\x1c\n\x14manual_runs_disabled\x18\x11 \x01(\x08\x12<\n\x19minimum_schedule_interval\x18\x12 
\x01(\x0b\x32\x19.google.protobuf.Duration\"\x8a\x01\n\x11\x41uthorizationType\x12\"\n\x1e\x41UTHORIZATION_TYPE_UNSPECIFIED\x10\x00\x12\x16\n\x12\x41UTHORIZATION_CODE\x10\x01\x12\"\n\x1eGOOGLE_PLUS_AUTHORIZATION_CODE\x10\x02\x12\x15\n\x11\x46IRST_PARTY_OAUTH\x10\x03\"c\n\x0f\x44\x61taRefreshType\x12!\n\x1d\x44\x41TA_REFRESH_TYPE_UNSPECIFIED\x10\x00\x12\x12\n\x0eSLIDING_WINDOW\x10\x01\x12\x19\n\x15\x43USTOM_SLIDING_WINDOW\x10\x02:\xa5\x01\xea\x41\xa1\x01\n.bigquerydatatransfer.googleapis.com/DataSource\x12,projects/{project}/dataSources/{data_source}\x12\x41projects/{project}/locations/{location}/dataSources/{data_source}\"\\\n\x14GetDataSourceRequest\x12\x44\n\x04name\x18\x01 \x01(\tB6\xe0\x41\x02\xfa\x41\x30\n.bigquerydatatransfer.googleapis.com/DataSource\"\x87\x01\n\x16ListDataSourcesRequest\x12\x46\n\x06parent\x18\x01 \x01(\tB6\xe0\x41\x02\xfa\x41\x30\x12.bigquerydatatransfer.googleapis.com/DataSource\x12\x12\n\npage_token\x18\x03 \x01(\t\x12\x11\n\tpage_size\x18\x04 \x01(\x05\"\x80\x01\n\x17ListDataSourcesResponse\x12G\n\x0c\x64\x61ta_sources\x18\x01 \x03(\x0b\x32\x31.google.cloud.bigquery.datatransfer.v1.DataSource\x12\x1c\n\x0fnext_page_token\x18\x02 \x01(\tB\x03\xe0\x41\x03\"\x92\x02\n\x1b\x43reateTransferConfigRequest\x12J\n\x06parent\x18\x01 \x01(\tB:\xe0\x41\x02\xfa\x41\x34\x12\x32\x62igquerydatatransfer.googleapis.com/TransferConfig\x12S\n\x0ftransfer_config\x18\x02 \x01(\x0b\x32\x35.google.cloud.bigquery.datatransfer.v1.TransferConfigB\x03\xe0\x41\x02\x12\x1e\n\x12\x61uthorization_code\x18\x03 \x01(\tB\x02\x18\x01\x12\x14\n\x0cversion_info\x18\x05 \x01(\t\x12\x1c\n\x14service_account_name\x18\x06 \x01(\t\"\xfc\x01\n\x1bUpdateTransferConfigRequest\x12S\n\x0ftransfer_config\x18\x01 \x01(\x0b\x32\x35.google.cloud.bigquery.datatransfer.v1.TransferConfigB\x03\xe0\x41\x02\x12\x1e\n\x12\x61uthorization_code\x18\x03 \x01(\tB\x02\x18\x01\x12\x34\n\x0bupdate_mask\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.FieldMaskB\x03\xe0\x41\x02\x12\x14\n\x0cversion_info\x18\x05 \x01(\t\x12\x1c\n\x14service_account_name\x18\x06 \x01(\t\"d\n\x18GetTransferConfigRequest\x12H\n\x04name\x18\x01 \x01(\tB:\xe0\x41\x02\xfa\x41\x34\n2bigquerydatatransfer.googleapis.com/TransferConfig\"g\n\x1b\x44\x65leteTransferConfigRequest\x12H\n\x04name\x18\x01 \x01(\tB:\xe0\x41\x02\xfa\x41\x34\n2bigquerydatatransfer.googleapis.com/TransferConfig\"V\n\x15GetTransferRunRequest\x12=\n\x04name\x18\x01 \x01(\tB/\xe0\x41\x02\xfa\x41)\n\'bigquerydatatransfer.googleapis.com/Run\"Y\n\x18\x44\x65leteTransferRunRequest\x12=\n\x04name\x18\x01 \x01(\tB/\xe0\x41\x02\xfa\x41)\n\'bigquerydatatransfer.googleapis.com/Run\"\xa8\x01\n\x1aListTransferConfigsRequest\x12J\n\x06parent\x18\x01 \x01(\tB:\xe0\x41\x02\xfa\x41\x34\x12\x32\x62igquerydatatransfer.googleapis.com/TransferConfig\x12\x17\n\x0f\x64\x61ta_source_ids\x18\x02 \x03(\t\x12\x12\n\npage_token\x18\x03 \x01(\t\x12\x11\n\tpage_size\x18\x04 \x01(\x05\"\x91\x01\n\x1bListTransferConfigsResponse\x12T\n\x10transfer_configs\x18\x01 \x03(\x0b\x32\x35.google.cloud.bigquery.datatransfer.v1.TransferConfigB\x03\xe0\x41\x03\x12\x1c\n\x0fnext_page_token\x18\x02 \x01(\tB\x03\xe0\x41\x03\"\xde\x02\n\x17ListTransferRunsRequest\x12?\n\x06parent\x18\x01 \x01(\tB/\xe0\x41\x02\xfa\x41)\x12\'bigquerydatatransfer.googleapis.com/Run\x12\x44\n\x06states\x18\x02 \x03(\x0e\x32\x34.google.cloud.bigquery.datatransfer.v1.TransferState\x12\x12\n\npage_token\x18\x03 \x01(\t\x12\x11\n\tpage_size\x18\x04 \x01(\x05\x12^\n\x0brun_attempt\x18\x05 
\x01(\x0e\x32I.google.cloud.bigquery.datatransfer.v1.ListTransferRunsRequest.RunAttempt\"5\n\nRunAttempt\x12\x1b\n\x17RUN_ATTEMPT_UNSPECIFIED\x10\x00\x12\n\n\x06LATEST\x10\x01\"\x88\x01\n\x18ListTransferRunsResponse\x12N\n\rtransfer_runs\x18\x01 \x03(\x0b\x32\x32.google.cloud.bigquery.datatransfer.v1.TransferRunB\x03\xe0\x41\x03\x12\x1c\n\x0fnext_page_token\x18\x02 \x01(\tB\x03\xe0\x41\x03\"\xe0\x01\n\x17ListTransferLogsRequest\x12?\n\x06parent\x18\x01 \x01(\tB/\xe0\x41\x02\xfa\x41)\n\'bigquerydatatransfer.googleapis.com/Run\x12\x12\n\npage_token\x18\x04 \x01(\t\x12\x11\n\tpage_size\x18\x05 \x01(\x05\x12]\n\rmessage_types\x18\x06 \x03(\x0e\x32\x46.google.cloud.bigquery.datatransfer.v1.TransferMessage.MessageSeverity\"\x90\x01\n\x18ListTransferLogsResponse\x12V\n\x11transfer_messages\x18\x01 \x03(\x0b\x32\x36.google.cloud.bigquery.datatransfer.v1.TransferMessageB\x03\xe0\x41\x03\x12\x1c\n\x0fnext_page_token\x18\x02 \x01(\tB\x03\xe0\x41\x03\"^\n\x16\x43heckValidCredsRequest\x12\x44\n\x04name\x18\x01 \x01(\tB6\xe0\x41\x02\xfa\x41\x30\n.bigquerydatatransfer.googleapis.com/DataSource\"2\n\x17\x43heckValidCredsResponse\x12\x17\n\x0fhas_valid_creds\x18\x01 \x01(\x08\"\xd1\x01\n\x1bScheduleTransferRunsRequest\x12J\n\x06parent\x18\x01 \x01(\tB:\xe0\x41\x02\xfa\x41\x34\n2bigquerydatatransfer.googleapis.com/TransferConfig\x12\x33\n\nstart_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x02\x12\x31\n\x08\x65nd_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x02\"`\n\x1cScheduleTransferRunsResponse\x12@\n\x04runs\x18\x01 \x03(\x0b\x32\x32.google.cloud.bigquery.datatransfer.v1.TransferRun\"\x8a\x03\n\x1eStartManualTransferRunsRequest\x12J\n\x06parent\x18\x01 \x01(\tB:\xe0\x41\x02\xfa\x41\x34\n2bigquerydatatransfer.googleapis.com/TransferConfig\x12o\n\x14requested_time_range\x18\x03 \x01(\x0b\x32O.google.cloud.bigquery.datatransfer.v1.StartManualTransferRunsRequest.TimeRangeH\x00\x12\x38\n\x12requested_run_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x1ai\n\tTimeRange\x12.\n\nstart_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x06\n\x04time\"c\n\x1fStartManualTransferRunsResponse\x12@\n\x04runs\x18\x01 \x03(\x0b\x32\x32.google.cloud.bigquery.datatransfer.v1.TransferRun\"F\n\x18\x45nrollDataSourcesRequest\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x17\n\x0f\x64\x61ta_source_ids\x18\x02 \x03(\t\"H\n\x1aUnenrollDataSourcesRequest\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x17\n\x0f\x64\x61ta_source_ids\x18\x02 
\x03(\t2\xd5\"\n\x13\x44\x61taTransferService\x12\xe6\x01\n\rGetDataSource\x12;.google.cloud.bigquery.datatransfer.v1.GetDataSourceRequest\x1a\x31.google.cloud.bigquery.datatransfer.v1.DataSource\"e\xda\x41\x04name\x82\xd3\xe4\x93\x02X\x12//v1/{name=projects/*/locations/*/dataSources/*}Z%\x12#/v1/{name=projects/*/dataSources/*}\x12\xf9\x01\n\x0fListDataSources\x12=.google.cloud.bigquery.datatransfer.v1.ListDataSourcesRequest\x1a>.google.cloud.bigquery.datatransfer.v1.ListDataSourcesResponse\"g\xda\x41\x06parent\x82\xd3\xe4\x93\x02X\x12//v1/{parent=projects/*/locations/*}/dataSourcesZ%\x12#/v1/{parent=projects/*}/dataSources\x12\xb6\x02\n\x14\x43reateTransferConfig\x12\x42.google.cloud.bigquery.datatransfer.v1.CreateTransferConfigRequest\x1a\x35.google.cloud.bigquery.datatransfer.v1.TransferConfig\"\xa2\x01\xda\x41\x16parent,transfer_config\x82\xd3\xe4\x93\x02\x82\x01\"3/v1/{parent=projects/*/locations/*}/transferConfigs:\x0ftransfer_configZ:\"\'/v1/{parent=projects/*}/transferConfigs:\x0ftransfer_config\x12\xdb\x02\n\x14UpdateTransferConfig\x12\x42.google.cloud.bigquery.datatransfer.v1.UpdateTransferConfigRequest\x1a\x35.google.cloud.bigquery.datatransfer.v1.TransferConfig\"\xc7\x01\xda\x41\x1btransfer_config,update_mask\x82\xd3\xe4\x93\x02\xa2\x01\x32\x43/v1/{transfer_config.name=projects/*/locations/*/transferConfigs/*}:\x0ftransfer_configZJ27/v1/{transfer_config.name=projects/*/transferConfigs/*}:\x0ftransfer_config\x12\xe1\x01\n\x14\x44\x65leteTransferConfig\x12\x42.google.cloud.bigquery.datatransfer.v1.DeleteTransferConfigRequest\x1a\x16.google.protobuf.Empty\"m\xda\x41\x04name\x82\xd3\xe4\x93\x02`*3/v1/{name=projects/*/locations/*/transferConfigs/*}Z)*\'/v1/{name=projects/*/transferConfigs/*}\x12\xfa\x01\n\x11GetTransferConfig\x12?.google.cloud.bigquery.datatransfer.v1.GetTransferConfigRequest\x1a\x35.google.cloud.bigquery.datatransfer.v1.TransferConfig\"m\xda\x41\x04name\x82\xd3\xe4\x93\x02`\x12\x33/v1/{name=projects/*/locations/*/transferConfigs/*}Z)\x12\'/v1/{name=projects/*/transferConfigs/*}\x12\x8d\x02\n\x13ListTransferConfigs\x12\x41.google.cloud.bigquery.datatransfer.v1.ListTransferConfigsRequest\x1a\x42.google.cloud.bigquery.datatransfer.v1.ListTransferConfigsResponse\"o\xda\x41\x06parent\x82\xd3\xe4\x93\x02`\x12\x33/v1/{parent=projects/*/locations/*}/transferConfigsZ)\x12\'/v1/{parent=projects/*}/transferConfigs\x12\xcd\x02\n\x14ScheduleTransferRuns\x12\x42.google.cloud.bigquery.datatransfer.v1.ScheduleTransferRunsRequest\x1a\x43.google.cloud.bigquery.datatransfer.v1.ScheduleTransferRunsResponse\"\xab\x01\x88\x02\x01\xda\x41\x1aparent,start_time,end_time\x82\xd3\xe4\x93\x02\x84\x01\"B/v1/{parent=projects/*/locations/*/transferConfigs/*}:scheduleRuns:\x01*Z;\"6/v1/{parent=projects/*/transferConfigs/*}:scheduleRuns:\x01*\x12\xbc\x02\n\x17StartManualTransferRuns\x12\x45.google.cloud.bigquery.datatransfer.v1.StartManualTransferRunsRequest\x1a\x46.google.cloud.bigquery.datatransfer.v1.StartManualTransferRunsResponse\"\x91\x01\x82\xd3\xe4\x93\x02\x8a\x01\"E/v1/{parent=projects/*/locations/*/transferConfigs/*}:startManualRuns:\x01*Z>\"9/v1/{parent=projects/*/transferConfigs/*}:startManualRuns:\x01*\x12\xff\x01\n\x0eGetTransferRun\x12<.google.cloud.bigquery.datatransfer.v1.GetTransferRunRequest\x1a\x32.google.cloud.bigquery.datatransfer.v1.TransferRun\"{\xda\x41\x04name\x82\xd3\xe4\x93\x02n\x12:/v1/{name=projects/*/locations/*/transferConfigs/*/runs/*}Z0\x12./v1/{name=projects/*/transferConfigs/*/runs/*}\x12\xe9\x01\n\x11\x44\x65leteTransferRun\x12?.google.cloud.bigquery.datatransfe
r.v1.DeleteTransferRunRequest\x1a\x16.google.protobuf.Empty\"{\xda\x41\x04name\x82\xd3\xe4\x93\x02n*:/v1/{name=projects/*/locations/*/transferConfigs/*/runs/*}Z0*./v1/{name=projects/*/transferConfigs/*/runs/*}\x12\x92\x02\n\x10ListTransferRuns\x12>.google.cloud.bigquery.datatransfer.v1.ListTransferRunsRequest\x1a?.google.cloud.bigquery.datatransfer.v1.ListTransferRunsResponse\"}\xda\x41\x06parent\x82\xd3\xe4\x93\x02n\x12:/v1/{parent=projects/*/locations/*/transferConfigs/*}/runsZ0\x12./v1/{parent=projects/*/transferConfigs/*}/runs\x12\xb2\x02\n\x10ListTransferLogs\x12>.google.cloud.bigquery.datatransfer.v1.ListTransferLogsRequest\x1a?.google.cloud.bigquery.datatransfer.v1.ListTransferLogsResponse\"\x9c\x01\xda\x41\x06parent\x82\xd3\xe4\x93\x02\x8c\x01\x12I/v1/{parent=projects/*/locations/*/transferConfigs/*/runs/*}/transferLogsZ?\x12=/v1/{parent=projects/*/transferConfigs/*/runs/*}/transferLogs\x12\x9e\x02\n\x0f\x43heckValidCreds\x12=.google.cloud.bigquery.datatransfer.v1.CheckValidCredsRequest\x1a>.google.cloud.bigquery.datatransfer.v1.CheckValidCredsResponse\"\x8b\x01\xda\x41\x04name\x82\xd3\xe4\x93\x02~\"?/v1/{name=projects/*/locations/*/dataSources/*}:checkValidCreds:\x01*Z8\"3/v1/{name=projects/*/dataSources/*}:checkValidCreds:\x01*\x12\xda\x01\n\x11\x45nrollDataSources\x12?.google.cloud.bigquery.datatransfer.v1.EnrollDataSourcesRequest\x1a\x16.google.protobuf.Empty\"l\x82\xd3\xe4\x93\x02\x66\"3/v1/{name=projects/*/locations/*}:enrollDataSources:\x01*Z,\"\'/v1/{name=projects/*}:enrollDataSources:\x01*\x12\xb2\x01\n\x13UnenrollDataSources\x12\x41.google.cloud.bigquery.datatransfer.v1.UnenrollDataSourcesRequest\x1a\x16.google.protobuf.Empty\"@\x82\xd3\xe4\x93\x02:\"5/v1/{name=projects/*/locations/*}:unenrollDataSources:\x01*\x1aW\xca\x41#bigquerydatatransfer.googleapis.com\xd2\x41.https://www.googleapis.com/auth/cloud-platformB\x8b\x02\n)com.google.cloud.bigquery.datatransfer.v1B\x11\x44\x61taTransferProtoP\x01ZMcloud.google.com/go/bigquery/datatransfer/apiv1/datatransferpb;datatransferpb\xaa\x02%Google.Cloud.BigQuery.DataTransfer.V1\xca\x02%Google\\Cloud\\BigQuery\\DataTransfer\\V1\xea\x02)Google::Cloud::Bigquery::DataTransfer::V1b\x06proto3" - -pool = Google::Protobuf::DescriptorPool.generated_pool - -begin - pool.add_serialized_file(descriptor_data) -rescue TypeError - # Compatibility code: will be removed in the next major version. - require 'google/protobuf/descriptor_pb' - parsed = Google::Protobuf::FileDescriptorProto.decode(descriptor_data) - parsed.clear_dependency - serialized = parsed.class.encode(parsed) - file = pool.add_serialized_file(serialized) - warn "Warning: Protobuf detected an import path issue while loading generated file #{__FILE__}" - imports = [ - ["google.protobuf.DoubleValue", "google/protobuf/wrappers.proto"], - ["google.protobuf.Duration", "google/protobuf/duration.proto"], - ["google.cloud.bigquery.datatransfer.v1.TransferConfig", "google/cloud/bigquery/datatransfer/v1/transfer.proto"], - ["google.protobuf.FieldMask", "google/protobuf/field_mask.proto"], - ["google.protobuf.Timestamp", "google/protobuf/timestamp.proto"], - ] - imports.each do |type_name, expected_filename| - import_file = pool.lookup(type_name).file_descriptor - if import_file.name != expected_filename - warn "- #{file.name} imports #{expected_filename}, but that import was loaded as #{import_file.name}" - end - end - warn "Each proto file must use a consistent fully-qualified name." - warn "This will become an error in the next major version." 
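# Why the fallback above matters in practice: once add_serialized_file
# succeeds, generated code never references message classes directly -- it
# resolves them from the shared generated pool by fully-qualified proto
# name, as the module below does. A minimal sketch of the same pattern,
# assuming this gem is installed (the project id and data source id are
# hypothetical):
#
#   require "google/cloud/bigquery/datatransfer/v1/datatransfer_pb"
#
#   pool = Google::Protobuf::DescriptorPool.generated_pool
#   request_class =
#     pool.lookup("google.cloud.bigquery.datatransfer.v1.GetDataSourceRequest")
#         .msgclass
#   request = request_class.new(
#     name: "projects/my-project/dataSources/scheduled_query"
#   )
#   puts request.to_json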
-end - -module Google - module Cloud - module Bigquery - module DataTransfer - module V1 - DataSourceParameter = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.DataSourceParameter").msgclass - DataSourceParameter::Type = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.DataSourceParameter.Type").enummodule - DataSource = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.DataSource").msgclass - DataSource::AuthorizationType = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.DataSource.AuthorizationType").enummodule - DataSource::DataRefreshType = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.DataSource.DataRefreshType").enummodule - GetDataSourceRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.GetDataSourceRequest").msgclass - ListDataSourcesRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.ListDataSourcesRequest").msgclass - ListDataSourcesResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.ListDataSourcesResponse").msgclass - CreateTransferConfigRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.CreateTransferConfigRequest").msgclass - UpdateTransferConfigRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.UpdateTransferConfigRequest").msgclass - GetTransferConfigRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.GetTransferConfigRequest").msgclass - DeleteTransferConfigRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.DeleteTransferConfigRequest").msgclass - GetTransferRunRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.GetTransferRunRequest").msgclass - DeleteTransferRunRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.DeleteTransferRunRequest").msgclass - ListTransferConfigsRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.ListTransferConfigsRequest").msgclass - ListTransferConfigsResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.ListTransferConfigsResponse").msgclass - ListTransferRunsRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.ListTransferRunsRequest").msgclass - ListTransferRunsRequest::RunAttempt = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.ListTransferRunsRequest.RunAttempt").enummodule - ListTransferRunsResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.ListTransferRunsResponse").msgclass - ListTransferLogsRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.ListTransferLogsRequest").msgclass - ListTransferLogsResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.ListTransferLogsResponse").msgclass - CheckValidCredsRequest = 
::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.CheckValidCredsRequest").msgclass - CheckValidCredsResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.CheckValidCredsResponse").msgclass - ScheduleTransferRunsRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.ScheduleTransferRunsRequest").msgclass - ScheduleTransferRunsResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.ScheduleTransferRunsResponse").msgclass - StartManualTransferRunsRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.StartManualTransferRunsRequest").msgclass - StartManualTransferRunsRequest::TimeRange = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.StartManualTransferRunsRequest.TimeRange").msgclass - StartManualTransferRunsResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.StartManualTransferRunsResponse").msgclass - EnrollDataSourcesRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.EnrollDataSourcesRequest").msgclass - UnenrollDataSourcesRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.UnenrollDataSourcesRequest").msgclass - end - end - end - end -end diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/datatransfer/v1/datatransfer_services_pb.rb b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/datatransfer/v1/datatransfer_services_pb.rb deleted file mode 100644 index a6e2ee48a2e0..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/datatransfer/v1/datatransfer_services_pb.rb +++ /dev/null @@ -1,99 +0,0 @@ -# Generated by the protocol buffer compiler. DO NOT EDIT! -# Source: google/cloud/bigquery/datatransfer/v1/datatransfer.proto for package 'Google.Cloud.Bigquery.DataTransfer.V1' -# Original file comments: -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -require 'grpc' -require 'google/cloud/bigquery/datatransfer/v1/datatransfer_pb' - -module Google - module Cloud - module Bigquery - module DataTransfer - module V1 - module DataTransferService - # This API allows users to manage their data transfers into BigQuery. - class Service - - include ::GRPC::GenericService - - self.marshal_class_method = :encode - self.unmarshal_class_method = :decode - self.service_name = 'google.cloud.bigquery.datatransfer.v1.DataTransferService' - - # Retrieves a supported data source and returns its settings. - rpc :GetDataSource, ::Google::Cloud::Bigquery::DataTransfer::V1::GetDataSourceRequest, ::Google::Cloud::Bigquery::DataTransfer::V1::DataSource - # Lists supported data sources and returns their settings. 
- rpc :ListDataSources, ::Google::Cloud::Bigquery::DataTransfer::V1::ListDataSourcesRequest, ::Google::Cloud::Bigquery::DataTransfer::V1::ListDataSourcesResponse - # Creates a new data transfer configuration. - rpc :CreateTransferConfig, ::Google::Cloud::Bigquery::DataTransfer::V1::CreateTransferConfigRequest, ::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig - # Updates a data transfer configuration. - # All fields must be set, even if they are not updated. - rpc :UpdateTransferConfig, ::Google::Cloud::Bigquery::DataTransfer::V1::UpdateTransferConfigRequest, ::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig - # Deletes a data transfer configuration, including any associated transfer - # runs and logs. - rpc :DeleteTransferConfig, ::Google::Cloud::Bigquery::DataTransfer::V1::DeleteTransferConfigRequest, ::Google::Protobuf::Empty - # Returns information about a data transfer config. - rpc :GetTransferConfig, ::Google::Cloud::Bigquery::DataTransfer::V1::GetTransferConfigRequest, ::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig - # Returns information about all transfer configs owned by a project in the - # specified location. - rpc :ListTransferConfigs, ::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferConfigsRequest, ::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferConfigsResponse - # Creates transfer runs for a time range [start_time, end_time]. - # For each date - or whatever granularity the data source supports - in the - # range, one transfer run is created. - # Note that runs are created per UTC time in the time range. - # DEPRECATED: use StartManualTransferRuns instead. - rpc :ScheduleTransferRuns, ::Google::Cloud::Bigquery::DataTransfer::V1::ScheduleTransferRunsRequest, ::Google::Cloud::Bigquery::DataTransfer::V1::ScheduleTransferRunsResponse - # Start manual transfer runs to be executed now with schedule_time equal to - # current time. The transfer runs can be created for a time range where the - # run_time is between start_time (inclusive) and end_time (exclusive), or for - # a specific run_time. - rpc :StartManualTransferRuns, ::Google::Cloud::Bigquery::DataTransfer::V1::StartManualTransferRunsRequest, ::Google::Cloud::Bigquery::DataTransfer::V1::StartManualTransferRunsResponse - # Returns information about the particular transfer run. - rpc :GetTransferRun, ::Google::Cloud::Bigquery::DataTransfer::V1::GetTransferRunRequest, ::Google::Cloud::Bigquery::DataTransfer::V1::TransferRun - # Deletes the specified transfer run. - rpc :DeleteTransferRun, ::Google::Cloud::Bigquery::DataTransfer::V1::DeleteTransferRunRequest, ::Google::Protobuf::Empty - # Returns information about running and completed transfer runs. - rpc :ListTransferRuns, ::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferRunsRequest, ::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferRunsResponse - # Returns log messages for the transfer run. - rpc :ListTransferLogs, ::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferLogsRequest, ::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferLogsResponse - # Returns true if valid credentials exist for the given data source and - # requesting user. - rpc :CheckValidCreds, ::Google::Cloud::Bigquery::DataTransfer::V1::CheckValidCredsRequest, ::Google::Cloud::Bigquery::DataTransfer::V1::CheckValidCredsResponse - # Enroll data sources in a user project. This allows users to create transfer - # configurations for these data sources. 
They will also appear in the - # ListDataSources RPC and as such, will appear in the - # [BigQuery UI](https://console.cloud.google.com/bigquery), and the documents - # can be found in the public guide for - # [BigQuery Web UI](https://cloud.google.com/bigquery/bigquery-web-ui) and - # [Data Transfer - # Service](https://cloud.google.com/bigquery/docs/working-with-transfers). - rpc :EnrollDataSources, ::Google::Cloud::Bigquery::DataTransfer::V1::EnrollDataSourcesRequest, ::Google::Protobuf::Empty - # Unenroll data sources in a user project. This allows users to remove - # transfer configurations for these data sources. They will no longer appear - # in the ListDataSources RPC and will also no longer appear in the [BigQuery - # UI](https://console.cloud.google.com/bigquery). Data transfers - # configurations of unenrolled data sources will not be scheduled. - rpc :UnenrollDataSources, ::Google::Cloud::Bigquery::DataTransfer::V1::UnenrollDataSourcesRequest, ::Google::Protobuf::Empty - end - - Stub = Service.rpc_stub_class - end - end - end - end - end -end diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/datatransfer/v1/transfer_pb.rb b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/datatransfer/v1/transfer_pb.rb deleted file mode 100644 index 2572e94d32a4..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/lib/google/cloud/bigquery/datatransfer/v1/transfer_pb.rb +++ /dev/null @@ -1,68 +0,0 @@ -# frozen_string_literal: true -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: google/cloud/bigquery/datatransfer/v1/transfer.proto - -require 'google/protobuf' - -require 'google/api/field_behavior_pb' -require 'google/api/resource_pb' -require 'google/protobuf/struct_pb' -require 'google/protobuf/timestamp_pb' -require 'google/protobuf/wrappers_pb' -require 'google/rpc/status_pb' - - -descriptor_data = "\n4google/cloud/bigquery/datatransfer/v1/transfer.proto\x12%google.cloud.bigquery.datatransfer.v1\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/wrappers.proto\x1a\x17google/rpc/status.proto\"0\n\x10\x45mailPreferences\x12\x1c\n\x14\x65nable_failure_email\x18\x01 \x01(\x08\"\x90\x01\n\x0fScheduleOptions\x12\x1f\n\x17\x64isable_auto_scheduling\x18\x03 \x01(\x08\x12.\n\nstart_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"\xa7\x02\n\x11ScheduleOptionsV2\x12W\n\x13time_based_schedule\x18\x01 \x01(\x0b\x32\x38.google.cloud.bigquery.datatransfer.v1.TimeBasedScheduleH\x00\x12P\n\x0fmanual_schedule\x18\x02 \x01(\x0b\x32\x35.google.cloud.bigquery.datatransfer.v1.ManualScheduleH\x00\x12[\n\x15\x65vent_driven_schedule\x18\x03 \x01(\x0b\x32:.google.cloud.bigquery.datatransfer.v1.EventDrivenScheduleH\x00\x42\n\n\x08schedule\"\x83\x01\n\x11TimeBasedSchedule\x12\x10\n\x08schedule\x18\x01 \x01(\t\x12.\n\nstart_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"\x10\n\x0eManualSchedule\"2\n\x13\x45ventDrivenSchedule\x12\x1b\n\x13pubsub_subscription\x18\x01 \x01(\t\"(\n\x08UserInfo\x12\x12\n\x05\x65mail\x18\x01 \x01(\tH\x00\x88\x01\x01\x42\x08\n\x06_email\"\x9b\t\n\x0eTransferConfig\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x08\x12 \n\x16\x64\x65stination_dataset_id\x18\x02 
\x01(\tH\x00\x12\x14\n\x0c\x64isplay_name\x18\x03 \x01(\t\x12\x16\n\x0e\x64\x61ta_source_id\x18\x05 \x01(\t\x12\'\n\x06params\x18\t \x01(\x0b\x32\x17.google.protobuf.Struct\x12\x10\n\x08schedule\x18\x07 \x01(\t\x12P\n\x10schedule_options\x18\x18 \x01(\x0b\x32\x36.google.cloud.bigquery.datatransfer.v1.ScheduleOptions\x12U\n\x13schedule_options_v2\x18\x1f \x01(\x0b\x32\x38.google.cloud.bigquery.datatransfer.v1.ScheduleOptionsV2\x12 \n\x18\x64\x61ta_refresh_window_days\x18\x0c \x01(\x05\x12\x10\n\x08\x64isabled\x18\r \x01(\x08\x12\x34\n\x0bupdate_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12\x36\n\rnext_run_time\x18\x08 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12H\n\x05state\x18\n \x01(\x0e\x32\x34.google.cloud.bigquery.datatransfer.v1.TransferStateB\x03\xe0\x41\x03\x12\x0f\n\x07user_id\x18\x0b \x01(\x03\x12\x1b\n\x0e\x64\x61taset_region\x18\x0e \x01(\tB\x03\xe0\x41\x03\x12!\n\x19notification_pubsub_topic\x18\x0f \x01(\t\x12R\n\x11\x65mail_preferences\x18\x12 \x01(\x0b\x32\x37.google.cloud.bigquery.datatransfer.v1.EmailPreferences\x12M\n\nowner_info\x18\x1b \x01(\x0b\x32/.google.cloud.bigquery.datatransfer.v1.UserInfoB\x03\xe0\x41\x03H\x01\x88\x01\x01\x12`\n\x18\x65ncryption_configuration\x18\x1c \x01(\x0b\x32>.google.cloud.bigquery.datatransfer.v1.EncryptionConfiguration\x12&\n\x05\x65rror\x18 \x01(\x0b\x32\x12.google.rpc.StatusB\x03\xe0\x41\x03:\xb9\x01\xea\x41\xb5\x01\n2bigquerydatatransfer.googleapis.com/TransferConfig\x12\x34projects/{project}/transferConfigs/{transfer_config}\x12Iprojects/{project}/locations/{location}/transferConfigs/{transfer_config}B\r\n\x0b\x64\x65stinationB\r\n\x0b_owner_info\"M\n\x17\x45ncryptionConfiguration\x12\x32\n\x0ckms_key_name\x18\x01 \x01(\x0b\x32\x1c.google.protobuf.StringValue\"\xff\x06\n\x0bTransferRun\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x08\x12\x31\n\rschedule_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08run_time\x18\n \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12(\n\x0c\x65rror_status\x18\x15 \x01(\x0b\x32\x12.google.rpc.Status\x12\x33\n\nstart_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12\x31\n\x08\x65nd_time\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12\x34\n\x0bupdate_time\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12,\n\x06params\x18\t \x01(\x0b\x32\x17.google.protobuf.StructB\x03\xe0\x41\x03\x12%\n\x16\x64\x65stination_dataset_id\x18\x02 \x01(\tB\x03\xe0\x41\x03H\x00\x12\x1b\n\x0e\x64\x61ta_source_id\x18\x07 \x01(\tB\x03\xe0\x41\x03\x12\x43\n\x05state\x18\x08 \x01(\x0e\x32\x34.google.cloud.bigquery.datatransfer.v1.TransferState\x12\x0f\n\x07user_id\x18\x0b \x01(\x03\x12\x15\n\x08schedule\x18\x0c \x01(\tB\x03\xe0\x41\x03\x12&\n\x19notification_pubsub_topic\x18\x17 \x01(\tB\x03\xe0\x41\x03\x12W\n\x11\x65mail_preferences\x18\x19 \x01(\x0b\x32\x37.google.cloud.bigquery.datatransfer.v1.EmailPreferencesB\x03\xe0\x41\x03:\xc4\x01\xea\x41\xc0\x01\n\'bigquerydatatransfer.googleapis.com/Run\x12?projects/{project}/transferConfigs/{transfer_config}/runs/{run}\x12Tprojects/{project}/locations/{location}/transferConfigs/{transfer_config}/runs/{run}B\r\n\x0b\x64\x65stination\"\x8a\x02\n\x0fTransferMessage\x12\x30\n\x0cmessage_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12X\n\x08severity\x18\x02 \x01(\x0e\x32\x46.google.cloud.bigquery.datatransfer.v1.TransferMessage.MessageSeverity\x12\x14\n\x0cmessage_text\x18\x03 \x01(\t\"U\n\x0fMessageSeverity\x12 
\n\x1cMESSAGE_SEVERITY_UNSPECIFIED\x10\x00\x12\x08\n\x04INFO\x10\x01\x12\x0b\n\x07WARNING\x10\x02\x12\t\n\x05\x45RROR\x10\x03*K\n\x0cTransferType\x12\x1d\n\x19TRANSFER_TYPE_UNSPECIFIED\x10\x00\x12\t\n\x05\x42\x41TCH\x10\x01\x12\r\n\tSTREAMING\x10\x02\x1a\x02\x18\x01*s\n\rTransferState\x12\x1e\n\x1aTRANSFER_STATE_UNSPECIFIED\x10\x00\x12\x0b\n\x07PENDING\x10\x02\x12\x0b\n\x07RUNNING\x10\x03\x12\r\n\tSUCCEEDED\x10\x04\x12\n\n\x06\x46\x41ILED\x10\x05\x12\r\n\tCANCELLED\x10\x06\x42\x8f\x02\n)com.google.cloud.bigquery.datatransfer.v1B\rTransferProtoP\x01ZMcloud.google.com/go/bigquery/datatransfer/apiv1/datatransferpb;datatransferpb\xa2\x02\x05GCBDT\xaa\x02%Google.Cloud.BigQuery.DataTransfer.V1\xca\x02%Google\\Cloud\\BigQuery\\DataTransfer\\V1\xea\x02)Google::Cloud::Bigquery::DataTransfer::V1b\x06proto3" - -pool = Google::Protobuf::DescriptorPool.generated_pool - -begin - pool.add_serialized_file(descriptor_data) -rescue TypeError - # Compatibility code: will be removed in the next major version. - require 'google/protobuf/descriptor_pb' - parsed = Google::Protobuf::FileDescriptorProto.decode(descriptor_data) - parsed.clear_dependency - serialized = parsed.class.encode(parsed) - file = pool.add_serialized_file(serialized) - warn "Warning: Protobuf detected an import path issue while loading generated file #{__FILE__}" - imports = [ - ["google.protobuf.Timestamp", "google/protobuf/timestamp.proto"], - ["google.protobuf.Struct", "google/protobuf/struct.proto"], - ["google.rpc.Status", "google/rpc/status.proto"], - ["google.protobuf.StringValue", "google/protobuf/wrappers.proto"], - ] - imports.each do |type_name, expected_filename| - import_file = pool.lookup(type_name).file_descriptor - if import_file.name != expected_filename - warn "- #{file.name} imports #{expected_filename}, but that import was loaded as #{import_file.name}" - end - end - warn "Each proto file must use a consistent fully-qualified name." - warn "This will become an error in the next major version." 
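# The headline additions in this file are TransferConfig#schedule_options_v2
# and the output-only TransferConfig#error (a google.rpc.Status). A hedged
# sketch of the new scheduling surface, assuming the classes registered
# below are loaded; the display name and data source id are hypothetical:
#
#   v1 = Google::Cloud::Bigquery::DataTransfer::V1
#   config = v1::TransferConfig.new(
#     display_name: "nightly-load",
#     data_source_id: "scheduled_query",
#     schedule_options_v2: v1::ScheduleOptionsV2.new(
#       # schedule is a oneof: set exactly one of time_based_schedule,
#       # manual_schedule, or event_driven_schedule.
#       time_based_schedule: v1::TimeBasedSchedule.new(schedule: "every 24 hours")
#     )
#   )
#   # config.error is OUTPUT_ONLY: populated by the service, ignored on input.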
-end - -module Google - module Cloud - module Bigquery - module DataTransfer - module V1 - EmailPreferences = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.EmailPreferences").msgclass - ScheduleOptions = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.ScheduleOptions").msgclass - ScheduleOptionsV2 = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.ScheduleOptionsV2").msgclass - TimeBasedSchedule = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.TimeBasedSchedule").msgclass - ManualSchedule = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.ManualSchedule").msgclass - EventDrivenSchedule = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.EventDrivenSchedule").msgclass - UserInfo = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.UserInfo").msgclass - TransferConfig = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.TransferConfig").msgclass - EncryptionConfiguration = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.EncryptionConfiguration").msgclass - TransferRun = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.TransferRun").msgclass - TransferMessage = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.TransferMessage").msgclass - TransferMessage::MessageSeverity = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.TransferMessage.MessageSeverity").enummodule - TransferType = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.TransferType").enummodule - TransferState = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.TransferState").enummodule - end - end - end - end -end diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/README.md b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/README.md deleted file mode 100644 index 614f3cdea732..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/README.md +++ /dev/null @@ -1,4 +0,0 @@ -# BigQuery Data Transfer Service V1 Protocol Buffer Documentation - -These files are for the YARD documentation of the generated protobuf files. -They are not intended to be required or loaded at runtime. diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/api/client.rb b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/api/client.rb deleted file mode 100644 index 2223d8935069..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/api/client.rb +++ /dev/null @@ -1,420 +0,0 @@ -# frozen_string_literal: true - -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Auto-generated by gapic-generator-ruby. DO NOT EDIT! - - -module Google - module Api - # Required information for every language. - # @!attribute [rw] reference_docs_uri - # @deprecated This field is deprecated and may be removed in the next major version update. - # @return [::String] - # Link to automatically generated reference documentation. Example: - # https://cloud.google.com/nodejs/docs/reference/asset/latest - # @!attribute [rw] destinations - # @return [::Array<::Google::Api::ClientLibraryDestination>] - # The destination where API teams want this client library to be published. - class CommonLanguageSettings - include ::Google::Protobuf::MessageExts - extend ::Google::Protobuf::MessageExts::ClassMethods - end - - # Details about how and where to publish client libraries. - # @!attribute [rw] version - # @return [::String] - # Version of the API to apply these settings to. This is the full protobuf - # package for the API, ending in the version element. - # Examples: "google.cloud.speech.v1" and "google.spanner.admin.database.v1". - # @!attribute [rw] launch_stage - # @return [::Google::Api::LaunchStage] - # Launch stage of this version of the API. - # @!attribute [rw] rest_numeric_enums - # @return [::Boolean] - # When using transport=rest, the client request will encode enums as - # numbers rather than strings. - # @!attribute [rw] java_settings - # @return [::Google::Api::JavaSettings] - # Settings for legacy Java features, supported in the Service YAML. - # @!attribute [rw] cpp_settings - # @return [::Google::Api::CppSettings] - # Settings for C++ client libraries. - # @!attribute [rw] php_settings - # @return [::Google::Api::PhpSettings] - # Settings for PHP client libraries. - # @!attribute [rw] python_settings - # @return [::Google::Api::PythonSettings] - # Settings for Python client libraries. - # @!attribute [rw] node_settings - # @return [::Google::Api::NodeSettings] - # Settings for Node client libraries. - # @!attribute [rw] dotnet_settings - # @return [::Google::Api::DotnetSettings] - # Settings for .NET client libraries. - # @!attribute [rw] ruby_settings - # @return [::Google::Api::RubySettings] - # Settings for Ruby client libraries. - # @!attribute [rw] go_settings - # @return [::Google::Api::GoSettings] - # Settings for Go client libraries. - class ClientLibrarySettings - include ::Google::Protobuf::MessageExts - extend ::Google::Protobuf::MessageExts::ClassMethods - end - - # This message configures the settings for publishing [Google Cloud Client - # libraries](https://cloud.google.com/apis/docs/cloud-client-libraries) - # generated from the service config. - # @!attribute [rw] method_settings - # @return [::Array<::Google::Api::MethodSettings>] - # A list of API method settings, e.g. the behavior for methods that use the - # long-running operation pattern. - # @!attribute [rw] new_issue_uri - # @return [::String] - # Link to a *public* URI where users can report issues. 
Example:
-    #     https://issuetracker.google.com/issues/new?component=190865&template=1161103
-    # @!attribute [rw] documentation_uri
-    #   @return [::String]
-    #     Link to product home page. Example:
-    #     https://cloud.google.com/asset-inventory/docs/overview
-    # @!attribute [rw] api_short_name
-    #   @return [::String]
-    #     Used as a tracking tag when collecting data about the API's developer
-    #     relations artifacts like docs, packages delivered to package managers,
-    #     etc. Example: "speech".
-    # @!attribute [rw] github_label
-    #   @return [::String]
-    #     GitHub label to apply to issues and pull requests opened for this API.
-    # @!attribute [rw] codeowner_github_teams
-    #   @return [::Array<::String>]
-    #     GitHub teams to be added to CODEOWNERS in the directory in GitHub
-    #     containing source code for the client libraries for this API.
-    # @!attribute [rw] doc_tag_prefix
-    #   @return [::String]
-    #     A prefix used in sample code when demarcating regions to be included in
-    #     documentation.
-    # @!attribute [rw] organization
-    #   @return [::Google::Api::ClientLibraryOrganization]
-    #     For whom the client library is being published.
-    # @!attribute [rw] library_settings
-    #   @return [::Array<::Google::Api::ClientLibrarySettings>]
-    #     Client library settings. If the same version string appears multiple
-    #     times in this list, then the last one wins. Settings from earlier
-    #     entries with the same version string are discarded.
-    # @!attribute [rw] proto_reference_documentation_uri
-    #   @return [::String]
-    #     Optional link to proto reference documentation. Example:
-    #     https://cloud.google.com/pubsub/lite/docs/reference/rpc
-    # @!attribute [rw] rest_reference_documentation_uri
-    #   @return [::String]
-    #     Optional link to REST reference documentation. Example:
-    #     https://cloud.google.com/pubsub/lite/docs/reference/rest
-    class Publishing
-      include ::Google::Protobuf::MessageExts
-      extend ::Google::Protobuf::MessageExts::ClassMethods
-    end
-
-    # Settings for Java client libraries.
-    # @!attribute [rw] library_package
-    #   @return [::String]
-    #     The package name to use in Java. Clobbers the java_package option
-    #     set in the protobuf. This should be used **only** by APIs
-    #     that have already set the language_settings.java.package_name field
-    #     in gapic.yaml. API teams should use the protobuf java_package option
-    #     where possible.
-    #
-    #     Example of a YAML configuration::
-    #
-    #        publishing:
-    #          java_settings:
-    #            library_package: com.google.cloud.pubsub.v1
-    # @!attribute [rw] service_class_names
-    #   @return [::Google::Protobuf::Map{::String => ::String}]
-    #     Configure the Java class name to use instead of the service's for its
-    #     corresponding generated GAPIC client. Keys are fully-qualified
-    #     service names as they appear in the protobuf (including the full
-    #     protobuf package). This should be used **only** by APIs that have
-    #     already set the language_settings.java.interface_names field in
-    #     gapic.yaml. API teams should otherwise use the service name as it
-    #     appears in the protobuf.
-    #
-    #     Example of a YAML configuration::
-    #
-    #        publishing:
-    #          java_settings:
-    #            service_class_names:
-    #              - google.pubsub.v1.Publisher: TopicAdmin
-    #              - google.pubsub.v1.Subscriber: SubscriptionAdmin
-    # @!attribute [rw] common
-    #   @return [::Google::Api::CommonLanguageSettings]
-    #     Some settings.
- class JavaSettings - include ::Google::Protobuf::MessageExts - extend ::Google::Protobuf::MessageExts::ClassMethods - - # @!attribute [rw] key - # @return [::String] - # @!attribute [rw] value - # @return [::String] - class ServiceClassNamesEntry - include ::Google::Protobuf::MessageExts - extend ::Google::Protobuf::MessageExts::ClassMethods - end - end - - # Settings for C++ client libraries. - # @!attribute [rw] common - # @return [::Google::Api::CommonLanguageSettings] - # Some settings. - class CppSettings - include ::Google::Protobuf::MessageExts - extend ::Google::Protobuf::MessageExts::ClassMethods - end - - # Settings for Php client libraries. - # @!attribute [rw] common - # @return [::Google::Api::CommonLanguageSettings] - # Some settings. - class PhpSettings - include ::Google::Protobuf::MessageExts - extend ::Google::Protobuf::MessageExts::ClassMethods - end - - # Settings for Python client libraries. - # @!attribute [rw] common - # @return [::Google::Api::CommonLanguageSettings] - # Some settings. - # @!attribute [rw] experimental_features - # @return [::Google::Api::PythonSettings::ExperimentalFeatures] - # Experimental features to be included during client library generation. - class PythonSettings - include ::Google::Protobuf::MessageExts - extend ::Google::Protobuf::MessageExts::ClassMethods - - # Experimental features to be included during client library generation. - # These fields will be deprecated once the feature graduates and is enabled - # by default. - # @!attribute [rw] rest_async_io_enabled - # @return [::Boolean] - # Enables generation of asynchronous REST clients if `rest` transport is - # enabled. By default, asynchronous REST clients will not be generated. - # This feature will be enabled by default 1 month after launching the - # feature in preview packages. - class ExperimentalFeatures - include ::Google::Protobuf::MessageExts - extend ::Google::Protobuf::MessageExts::ClassMethods - end - end - - # Settings for Node client libraries. - # @!attribute [rw] common - # @return [::Google::Api::CommonLanguageSettings] - # Some settings. - class NodeSettings - include ::Google::Protobuf::MessageExts - extend ::Google::Protobuf::MessageExts::ClassMethods - end - - # Settings for Dotnet client libraries. - # @!attribute [rw] common - # @return [::Google::Api::CommonLanguageSettings] - # Some settings. - # @!attribute [rw] renamed_services - # @return [::Google::Protobuf::Map{::String => ::String}] - # Map from original service names to renamed versions. - # This is used when the default generated types - # would cause a naming conflict. (Neither name is - # fully-qualified.) - # Example: Subscriber to SubscriberServiceApi. - # @!attribute [rw] renamed_resources - # @return [::Google::Protobuf::Map{::String => ::String}] - # Map from full resource types to the effective short name - # for the resource. This is used when otherwise resource - # named from different services would cause naming collisions. - # Example entry: - # "datalabeling.googleapis.com/Dataset": "DataLabelingDataset" - # @!attribute [rw] ignored_resources - # @return [::Array<::String>] - # List of full resource types to ignore during generation. - # This is typically used for API-specific Location resources, - # which should be handled by the generator as if they were actually - # the common Location resources. 
- # Example entry: "documentai.googleapis.com/Location" - # @!attribute [rw] forced_namespace_aliases - # @return [::Array<::String>] - # Namespaces which must be aliased in snippets due to - # a known (but non-generator-predictable) naming collision - # @!attribute [rw] handwritten_signatures - # @return [::Array<::String>] - # Method signatures (in the form "service.method(signature)") - # which are provided separately, so shouldn't be generated. - # Snippets *calling* these methods are still generated, however. - class DotnetSettings - include ::Google::Protobuf::MessageExts - extend ::Google::Protobuf::MessageExts::ClassMethods - - # @!attribute [rw] key - # @return [::String] - # @!attribute [rw] value - # @return [::String] - class RenamedServicesEntry - include ::Google::Protobuf::MessageExts - extend ::Google::Protobuf::MessageExts::ClassMethods - end - - # @!attribute [rw] key - # @return [::String] - # @!attribute [rw] value - # @return [::String] - class RenamedResourcesEntry - include ::Google::Protobuf::MessageExts - extend ::Google::Protobuf::MessageExts::ClassMethods - end - end - - # Settings for Ruby client libraries. - # @!attribute [rw] common - # @return [::Google::Api::CommonLanguageSettings] - # Some settings. - class RubySettings - include ::Google::Protobuf::MessageExts - extend ::Google::Protobuf::MessageExts::ClassMethods - end - - # Settings for Go client libraries. - # @!attribute [rw] common - # @return [::Google::Api::CommonLanguageSettings] - # Some settings. - class GoSettings - include ::Google::Protobuf::MessageExts - extend ::Google::Protobuf::MessageExts::ClassMethods - end - - # Describes the generator configuration for a method. - # @!attribute [rw] selector - # @return [::String] - # The fully qualified name of the method, for which the options below apply. - # This is used to find the method to apply the options. - # - # Example: - # - # publishing: - # method_settings: - # - selector: google.storage.control.v2.StorageControl.CreateFolder - # # method settings for CreateFolder... - # @!attribute [rw] long_running - # @return [::Google::Api::MethodSettings::LongRunning] - # Describes settings to use for long-running operations when generating - # API methods for RPCs. Complements RPCs that use the annotations in - # google/longrunning/operations.proto. - # - # Example of a YAML configuration:: - # - # publishing: - # method_settings: - # - selector: google.cloud.speech.v2.Speech.BatchRecognize - # long_running: - # initial_poll_delay: 60s # 1 minute - # poll_delay_multiplier: 1.5 - # max_poll_delay: 360s # 6 minutes - # total_poll_timeout: 54000s # 90 minutes - # @!attribute [rw] auto_populated_fields - # @return [::Array<::String>] - # List of top-level fields of the request message, that should be - # automatically populated by the client libraries based on their - # (google.api.field_info).format. Currently supported format: UUID4. - # - # Example of a YAML configuration: - # - # publishing: - # method_settings: - # - selector: google.example.v1.ExampleService.CreateExample - # auto_populated_fields: - # - request_id - class MethodSettings - include ::Google::Protobuf::MessageExts - extend ::Google::Protobuf::MessageExts::ClassMethods - - # Describes settings to use when generating API methods that use the - # long-running operation pattern. - # All default values below are from those used in the client library - # generators (e.g. 
- # [Java](https://github.com/googleapis/gapic-generator-java/blob/04c2faa191a9b5a10b92392fe8482279c4404803/src/main/java/com/google/api/generator/gapic/composer/common/RetrySettingsComposer.java)). - # @!attribute [rw] initial_poll_delay - # @return [::Google::Protobuf::Duration] - # Initial delay after which the first poll request will be made. - # Default value: 5 seconds. - # @!attribute [rw] poll_delay_multiplier - # @return [::Float] - # Multiplier to gradually increase delay between subsequent polls until it - # reaches max_poll_delay. - # Default value: 1.5. - # @!attribute [rw] max_poll_delay - # @return [::Google::Protobuf::Duration] - # Maximum time between two subsequent poll requests. - # Default value: 45 seconds. - # @!attribute [rw] total_poll_timeout - # @return [::Google::Protobuf::Duration] - # Total polling timeout. - # Default value: 5 minutes. - class LongRunning - include ::Google::Protobuf::MessageExts - extend ::Google::Protobuf::MessageExts::ClassMethods - end - end - - # The organization for which the client libraries are being published. - # Affects the url where generated docs are published, etc. - module ClientLibraryOrganization - # Not useful. - CLIENT_LIBRARY_ORGANIZATION_UNSPECIFIED = 0 - - # Google Cloud Platform Org. - CLOUD = 1 - - # Ads (Advertising) Org. - ADS = 2 - - # Photos Org. - PHOTOS = 3 - - # Street View Org. - STREET_VIEW = 4 - - # Shopping Org. - SHOPPING = 5 - - # Geo Org. - GEO = 6 - - # Generative AI - https://developers.generativeai.google - GENERATIVE_AI = 7 - end - - # To where should client libraries be published? - module ClientLibraryDestination - # Client libraries will neither be generated nor published to package - # managers. - CLIENT_LIBRARY_DESTINATION_UNSPECIFIED = 0 - - # Generate the client library in a repo under github.com/googleapis, - # but don't publish it to package managers. - GITHUB = 10 - - # Publish the library to package managers like nuget.org and npmjs.com. - PACKAGE_MANAGER = 20 - end - end -end diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/api/field_behavior.rb b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/api/field_behavior.rb deleted file mode 100644 index b03587481349..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/api/field_behavior.rb +++ /dev/null @@ -1,85 +0,0 @@ -# frozen_string_literal: true - -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Auto-generated by gapic-generator-ruby. DO NOT EDIT! - - -module Google - module Api - # An indicator of the behavior of a given field (for example, that a field - # is required in requests, or given as output but ignored as input). - # This **does not** change the behavior in protocol buffers itself; it only - # denotes the behavior and may affect how API tooling handles the field. - # - # Note: This enum **may** receive new values in the future. - module FieldBehavior - # Conventional default for enums. Do not use this. 
- FIELD_BEHAVIOR_UNSPECIFIED = 0 - - # Specifically denotes a field as optional. - # While all fields in protocol buffers are optional, this may be specified - # for emphasis if appropriate. - OPTIONAL = 1 - - # Denotes a field as required. - # This indicates that the field **must** be provided as part of the request, - # and failure to do so will cause an error (usually `INVALID_ARGUMENT`). - REQUIRED = 2 - - # Denotes a field as output only. - # This indicates that the field is provided in responses, but including the - # field in a request does nothing (the server *must* ignore it and - # *must not* throw an error as a result of the field's presence). - OUTPUT_ONLY = 3 - - # Denotes a field as input only. - # This indicates that the field is provided in requests, and the - # corresponding field is not included in output. - INPUT_ONLY = 4 - - # Denotes a field as immutable. - # This indicates that the field may be set once in a request to create a - # resource, but may not be changed thereafter. - IMMUTABLE = 5 - - # Denotes that a (repeated) field is an unordered list. - # This indicates that the service may provide the elements of the list - # in any arbitrary order, rather than the order the user originally - # provided. Additionally, the list's order may or may not be stable. - UNORDERED_LIST = 6 - - # Denotes that this field returns a non-empty default value if not set. - # This indicates that if the user provides the empty value in a request, - # a non-empty value will be returned. The user will not be aware of what - # non-empty value to expect. - NON_EMPTY_DEFAULT = 7 - - # Denotes that the field in a resource (a message annotated with - # google.api.resource) is used in the resource name to uniquely identify the - # resource. For AIP-compliant APIs, this should only be applied to the - # `name` field on the resource. - # - # This behavior should not be applied to references to other resources within - # the message. - # - # The identifier field of resources often have different field behavior - # depending on the request it is embedded in (e.g. for Create methods name - # is optional and unused, while for Update methods it is required). Instead - # of method-specific annotations, only `IDENTIFIER` is required. - IDENTIFIER = 8 - end - end -end diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/api/launch_stage.rb b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/api/launch_stage.rb deleted file mode 100644 index 38b4b61e6061..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/api/launch_stage.rb +++ /dev/null @@ -1,71 +0,0 @@ -# frozen_string_literal: true - -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Auto-generated by gapic-generator-ruby. DO NOT EDIT! - - -module Google - module Api - # The launch stage as defined by [Google Cloud Platform - # Launch Stages](https://cloud.google.com/terms/launch-stages). 
- module LaunchStage - # Do not use this default value. - LAUNCH_STAGE_UNSPECIFIED = 0 - - # The feature is not yet implemented. Users can not use it. - UNIMPLEMENTED = 6 - - # Prelaunch features are hidden from users and are only visible internally. - PRELAUNCH = 7 - - # Early Access features are limited to a closed group of testers. To use - # these features, you must sign up in advance and sign a Trusted Tester - # agreement (which includes confidentiality provisions). These features may - # be unstable, changed in backward-incompatible ways, and are not - # guaranteed to be released. - EARLY_ACCESS = 1 - - # Alpha is a limited availability test for releases before they are cleared - # for widespread use. By Alpha, all significant design issues are resolved - # and we are in the process of verifying functionality. Alpha customers - # need to apply for access, agree to applicable terms, and have their - # projects allowlisted. Alpha releases don't have to be feature complete, - # no SLAs are provided, and there are no technical support obligations, but - # they will be far enough along that customers can actually use them in - # test environments or for limited-use tests -- just like they would in - # normal production cases. - ALPHA = 2 - - # Beta is the point at which we are ready to open a release for any - # customer to use. There are no SLA or technical support obligations in a - # Beta release. Products will be complete from a feature perspective, but - # may have some open outstanding issues. Beta releases are suitable for - # limited production use cases. - BETA = 3 - - # GA features are open to all developers and are considered stable and - # fully qualified for production use. - GA = 4 - - # Deprecated features are scheduled to be shut down and removed. For more - # information, see the "Deprecation Policy" section of our [Terms of - # Service](https://cloud.google.com/terms/) - # and the [Google Cloud Platform Subject to the Deprecation - # Policy](https://cloud.google.com/terms/deprecation) documentation. - DEPRECATED = 5 - end - end -end diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/api/resource.rb b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/api/resource.rb deleted file mode 100644 index 935946d52792..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/api/resource.rb +++ /dev/null @@ -1,227 +0,0 @@ -# frozen_string_literal: true - -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Auto-generated by gapic-generator-ruby. DO NOT EDIT! - - -module Google - module Api - # A simple descriptor of a resource type. - # - # ResourceDescriptor annotates a resource message (either by means of a - # protobuf annotation or use in the service config), and associates the - # resource's schema, the resource type, and the pattern of the resource name. - # - # Example: - # - # message Topic { - # // Indicates this message defines a resource schema. 
- # // Declares the resource type in the format of {service}/{kind}. - # // For Kubernetes resources, the format is {api group}/{kind}. - # option (google.api.resource) = { - # type: "pubsub.googleapis.com/Topic" - # pattern: "projects/{project}/topics/{topic}" - # }; - # } - # - # The ResourceDescriptor Yaml config will look like: - # - # resources: - # - type: "pubsub.googleapis.com/Topic" - # pattern: "projects/{project}/topics/{topic}" - # - # Sometimes, resources have multiple patterns, typically because they can - # live under multiple parents. - # - # Example: - # - # message LogEntry { - # option (google.api.resource) = { - # type: "logging.googleapis.com/LogEntry" - # pattern: "projects/{project}/logs/{log}" - # pattern: "folders/{folder}/logs/{log}" - # pattern: "organizations/{organization}/logs/{log}" - # pattern: "billingAccounts/{billing_account}/logs/{log}" - # }; - # } - # - # The ResourceDescriptor Yaml config will look like: - # - # resources: - # - type: 'logging.googleapis.com/LogEntry' - # pattern: "projects/{project}/logs/{log}" - # pattern: "folders/{folder}/logs/{log}" - # pattern: "organizations/{organization}/logs/{log}" - # pattern: "billingAccounts/{billing_account}/logs/{log}" - # @!attribute [rw] type - # @return [::String] - # The resource type. It must be in the format of - # \\{service_name}/\\{resource_type_kind}. The `resource_type_kind` must be - # singular and must not include version numbers. - # - # Example: `storage.googleapis.com/Bucket` - # - # The value of the resource_type_kind must follow the regular expression - # /[A-Za-z][a-zA-Z0-9]+/. It should start with an upper case character and - # should use PascalCase (UpperCamelCase). The maximum number of - # characters allowed for the `resource_type_kind` is 100. - # @!attribute [rw] pattern - # @return [::Array<::String>] - # Optional. The relative resource name pattern associated with this resource - # type. The DNS prefix of the full resource name shouldn't be specified here. - # - # The path pattern must follow the syntax, which aligns with HTTP binding - # syntax: - # - # Template = Segment { "/" Segment } ; - # Segment = LITERAL | Variable ; - # Variable = "{" LITERAL "}" ; - # - # Examples: - # - # - "projects/\\{project}/topics/\\{topic}" - # - "projects/\\{project}/knowledgeBases/\\{knowledge_base}" - # - # The components in braces correspond to the IDs for each resource in the - # hierarchy. It is expected that, if multiple patterns are provided, - # the same component name (e.g. "project") refers to IDs of the same - # type of resource. - # @!attribute [rw] name_field - # @return [::String] - # Optional. The field on the resource that designates the resource name - # field. If omitted, this is assumed to be "name". - # @!attribute [rw] history - # @return [::Google::Api::ResourceDescriptor::History] - # Optional. The historical or future-looking state of the resource pattern. - # - # Example: - # - # // The InspectTemplate message originally only supported resource - # // names with organization, and project was added later. 
- # message InspectTemplate { - # option (google.api.resource) = { - # type: "dlp.googleapis.com/InspectTemplate" - # pattern: - # "organizations/{organization}/inspectTemplates/{inspect_template}" - # pattern: "projects/{project}/inspectTemplates/{inspect_template}" - # history: ORIGINALLY_SINGLE_PATTERN - # }; - # } - # @!attribute [rw] plural - # @return [::String] - # The plural name used in the resource name and permission names, such as - # 'projects' for the resource name of 'projects/\\{project}' and the permission - # name of 'cloudresourcemanager.googleapis.com/projects.get'. One exception - # to this is for Nested Collections that have stuttering names, as defined - # in [AIP-122](https://google.aip.dev/122#nested-collections), where the - # collection ID in the resource name pattern does not necessarily directly - # match the `plural` value. - # - # It is the same concept of the `plural` field in k8s CRD spec - # https://kubernetes.io/docs/tasks/access-kubernetes-api/custom-resources/custom-resource-definitions/ - # - # Note: The plural form is required even for singleton resources. See - # https://aip.dev/156 - # @!attribute [rw] singular - # @return [::String] - # The same concept of the `singular` field in k8s CRD spec - # https://kubernetes.io/docs/tasks/access-kubernetes-api/custom-resources/custom-resource-definitions/ - # Such as "project" for the `resourcemanager.googleapis.com/Project` type. - # @!attribute [rw] style - # @return [::Array<::Google::Api::ResourceDescriptor::Style>] - # Style flag(s) for this resource. - # These indicate that a resource is expected to conform to a given - # style. See the specific style flags for additional information. - class ResourceDescriptor - include ::Google::Protobuf::MessageExts - extend ::Google::Protobuf::MessageExts::ClassMethods - - # A description of the historical or future-looking state of the - # resource pattern. - module History - # The "unset" value. - HISTORY_UNSPECIFIED = 0 - - # The resource originally had one pattern and launched as such, and - # additional patterns were added later. - ORIGINALLY_SINGLE_PATTERN = 1 - - # The resource has one pattern, but the API owner expects to add more - # later. (This is the inverse of ORIGINALLY_SINGLE_PATTERN, and prevents - # that from being necessary once there are multiple patterns.) - FUTURE_MULTI_PATTERN = 2 - end - - # A flag representing a specific style that a resource claims to conform to. - module Style - # The unspecified value. Do not use. - STYLE_UNSPECIFIED = 0 - - # This resource is intended to be "declarative-friendly". - # - # Declarative-friendly resources must be more strictly consistent, and - # setting this to true communicates to tools that this resource should - # adhere to declarative-friendly expectations. - # - # Note: This is used by the API linter (linter.aip.dev) to enable - # additional checks. - DECLARATIVE_FRIENDLY = 1 - end - end - - # Defines a proto annotation that describes a string field that refers to - # an API resource. - # @!attribute [rw] type - # @return [::String] - # The resource type that the annotated field references. - # - # Example: - # - # message Subscription { - # string topic = 2 [(google.api.resource_reference) = { - # type: "pubsub.googleapis.com/Topic" - # }]; - # } - # - # Occasionally, a field may reference an arbitrary resource. In this case, - # APIs use the special value * in their resource reference. 
- # - # Example: - # - # message GetIamPolicyRequest { - # string resource = 2 [(google.api.resource_reference) = { - # type: "*" - # }]; - # } - # @!attribute [rw] child_type - # @return [::String] - # The resource type of a child collection that the annotated field - # references. This is useful for annotating the `parent` field that - # doesn't have a fixed resource type. - # - # Example: - # - # message ListLogEntriesRequest { - # string parent = 1 [(google.api.resource_reference) = { - # child_type: "logging.googleapis.com/LogEntry" - # }; - # } - class ResourceReference - include ::Google::Protobuf::MessageExts - extend ::Google::Protobuf::MessageExts::ClassMethods - end - end -end diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/cloud/bigquery/datatransfer/v1/datatransfer.rb b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/cloud/bigquery/datatransfer/v1/datatransfer.rb deleted file mode 100644 index 425e671dc477..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/cloud/bigquery/datatransfer/v1/datatransfer.rb +++ /dev/null @@ -1,722 +0,0 @@ -# frozen_string_literal: true - -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Auto-generated by gapic-generator-ruby. DO NOT EDIT! - - -module Google - module Cloud - module Bigquery - module DataTransfer - module V1 - # A parameter used to define custom fields in a data source definition. - # @!attribute [rw] param_id - # @return [::String] - # Parameter identifier. - # @!attribute [rw] display_name - # @return [::String] - # Parameter display name in the user interface. - # @!attribute [rw] description - # @return [::String] - # Parameter description. - # @!attribute [rw] type - # @return [::Google::Cloud::Bigquery::DataTransfer::V1::DataSourceParameter::Type] - # Parameter type. - # @!attribute [rw] required - # @return [::Boolean] - # Is parameter required. - # @!attribute [rw] repeated - # @return [::Boolean] - # Deprecated. This field has no effect. - # @!attribute [rw] validation_regex - # @return [::String] - # Regular expression which can be used for parameter validation. - # @!attribute [rw] allowed_values - # @return [::Array<::String>] - # All possible values for the parameter. - # @!attribute [rw] min_value - # @return [::Google::Protobuf::DoubleValue] - # For integer and double values specifies minimum allowed value. - # @!attribute [rw] max_value - # @return [::Google::Protobuf::DoubleValue] - # For integer and double values specifies maximum allowed value. - # @!attribute [rw] fields - # @return [::Array<::Google::Cloud::Bigquery::DataTransfer::V1::DataSourceParameter>] - # Deprecated. This field has no effect. - # @!attribute [rw] validation_description - # @return [::String] - # Description of the requirements for this field, in case the user input does - # not fulfill the regex pattern or min/max values. 
- # @!attribute [rw] validation_help_url - # @return [::String] - # URL to a help document to further explain the naming requirements. - # @!attribute [rw] immutable - # @return [::Boolean] - # Cannot be changed after initial creation. - # @!attribute [rw] recurse - # @return [::Boolean] - # Deprecated. This field has no effect. - # @!attribute [rw] deprecated - # @return [::Boolean] - # If true, it should not be used in new transfers, and it should not be - # visible to users. - class DataSourceParameter - include ::Google::Protobuf::MessageExts - extend ::Google::Protobuf::MessageExts::ClassMethods - - # Parameter type. - module Type - # Type unspecified. - TYPE_UNSPECIFIED = 0 - - # String parameter. - STRING = 1 - - # Integer parameter (64-bits). - # Will be serialized to json as string. - INTEGER = 2 - - # Double precision floating point parameter. - DOUBLE = 3 - - # Boolean parameter. - BOOLEAN = 4 - - # Deprecated. This field has no effect. - RECORD = 5 - - # Page ID for a Google+ Page. - PLUS_PAGE = 6 - - # List of strings parameter. - LIST = 7 - end - end - - # Defines the properties and custom parameters for a data source. - # @!attribute [r] name - # @return [::String] - # Output only. Data source resource name. - # @!attribute [rw] data_source_id - # @return [::String] - # Data source id. - # @!attribute [rw] display_name - # @return [::String] - # User friendly data source name. - # @!attribute [rw] description - # @return [::String] - # User friendly data source description string. - # @!attribute [rw] client_id - # @return [::String] - # Data source client id which should be used to receive refresh token. - # @!attribute [rw] scopes - # @return [::Array<::String>] - # Api auth scopes for which refresh token needs to be obtained. These are - # scopes needed by a data source to prepare data and ingest them into - # BigQuery, e.g., https://www.googleapis.com/auth/bigquery - # @!attribute [rw] transfer_type - # @deprecated This field is deprecated and may be removed in the next major version update. - # @return [::Google::Cloud::Bigquery::DataTransfer::V1::TransferType] - # Deprecated. This field has no effect. - # @!attribute [rw] supports_multiple_transfers - # @deprecated This field is deprecated and may be removed in the next major version update. - # @return [::Boolean] - # Deprecated. This field has no effect. - # @!attribute [rw] update_deadline_seconds - # @return [::Integer] - # The number of seconds to wait for an update from the data source - # before the Data Transfer Service marks the transfer as FAILED. - # @!attribute [rw] default_schedule - # @return [::String] - # Default data transfer schedule. - # Examples of valid schedules include: - # `1st,3rd monday of month 15:30`, - # `every wed,fri of jan,jun 13:15`, and - # `first sunday of quarter 00:00`. - # @!attribute [rw] supports_custom_schedule - # @return [::Boolean] - # Specifies whether the data source supports a user defined schedule, or - # operates on the default schedule. - # When set to `true`, user can override default schedule. - # @!attribute [rw] parameters - # @return [::Array<::Google::Cloud::Bigquery::DataTransfer::V1::DataSourceParameter>] - # Data source parameters. - # @!attribute [rw] help_url - # @return [::String] - # Url for the help document for this data source. - # @!attribute [rw] authorization_type - # @return [::Google::Cloud::Bigquery::DataTransfer::V1::DataSource::AuthorizationType] - # Indicates the type of authorization. 
- # @!attribute [rw] data_refresh_type
- # @return [::Google::Cloud::Bigquery::DataTransfer::V1::DataSource::DataRefreshType]
- # Specifies whether the data source supports automatic data refresh for the
- # past few days, and how it's supported.
- # For some data sources, data might not be complete until a few days later,
- # so it's useful to refresh data automatically.
- # @!attribute [rw] default_data_refresh_window_days
- # @return [::Integer]
- # Default data refresh window, in days.
- # Only meaningful when `data_refresh_type` = `SLIDING_WINDOW`.
- # @!attribute [rw] manual_runs_disabled
- # @return [::Boolean]
- # Disables backfilling and manual run scheduling
- # for the data source.
- # @!attribute [rw] minimum_schedule_interval
- # @return [::Google::Protobuf::Duration]
- # The minimum interval for the scheduler to schedule runs.
- class DataSource
- include ::Google::Protobuf::MessageExts
- extend ::Google::Protobuf::MessageExts::ClassMethods
-
- # The type of authorization needed for this data source.
- module AuthorizationType
- # Type unspecified.
- AUTHORIZATION_TYPE_UNSPECIFIED = 0
-
- # Use OAuth 2 authorization codes that can be exchanged
- # for a refresh token on the backend.
- AUTHORIZATION_CODE = 1
-
- # Return an authorization code for a given Google+ page that can then be
- # exchanged for a refresh token on the backend.
- GOOGLE_PLUS_AUTHORIZATION_CODE = 2
-
- # Use First Party OAuth.
- FIRST_PARTY_OAUTH = 3
- end
-
- # Represents how the data source supports data auto refresh.
- module DataRefreshType
- # The data source won't support data auto refresh, which is the default
- # value.
- DATA_REFRESH_TYPE_UNSPECIFIED = 0
-
- # The data source supports data auto refresh, and runs will be scheduled
- # for the past few days. Does not allow custom values to be set for each
- # transfer config.
- SLIDING_WINDOW = 1
-
- # The data source supports data auto refresh, and runs will be scheduled
- # for the past few days. Allows custom values to be set for each transfer
- # config.
- CUSTOM_SLIDING_WINDOW = 2
- end
- end
-
- # A request to get data source info.
- # @!attribute [rw] name
- # @return [::String]
- # Required. The field will contain the name of the resource requested, for
- # example: `projects/{project_id}/dataSources/{data_source_id}` or
- # `projects/{project_id}/locations/{location_id}/dataSources/{data_source_id}`
- class GetDataSourceRequest
- include ::Google::Protobuf::MessageExts
- extend ::Google::Protobuf::MessageExts::ClassMethods
- end
-
- # Request to list supported data sources and their data transfer settings.
- # @!attribute [rw] parent
- # @return [::String]
- # Required. The BigQuery project id for which data sources should be
- # returned. Must be in the form: `projects/{project_id}` or
- # `projects/{project_id}/locations/{location_id}`
- # @!attribute [rw] page_token
- # @return [::String]
- # Pagination token, which can be used to request a specific page
- # of `ListDataSourcesRequest` list results. For multiple-page
- # results, `ListDataSourcesResponse` outputs
- # a `next_page` token, which can be used as the
- # `page_token` value to request the next page of list results.
- # @!attribute [rw] page_size
- # @return [::Integer]
- # Page size. The default page size is the maximum value of 1000 results.
- class ListDataSourcesRequest
- include ::Google::Protobuf::MessageExts
- extend ::Google::Protobuf::MessageExts::ClassMethods
- end
-
- # Returns a list of supported data sources and their metadata.
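As an editor's aside, the `GetDataSource`/`ListDataSources` request and response messages above are easiest to read next to a call site. The following is a minimal, illustrative sketch using the generated client from this gem; the project ID is a placeholder and Application Default Credentials are assumed.

```ruby
require "google/cloud/bigquery/data_transfer/v1"

# Build a gRPC client for the Data Transfer service.
client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new

# ListDataSourcesRequest: parent is `projects/{project_id}` (placeholder).
response = client.list_data_sources parent: "projects/my-project", page_size: 100

# The paged enumerable follows next_page_token transparently.
response.each do |data_source|
  puts "#{data_source.data_source_id}: #{data_source.display_name}"
  # DataSourceParameter metadata drives per-source configuration.
  data_source.parameters.each do |param|
    puts "  #{param.param_id} (#{param.type})#{param.required ? ' [required]' : ''}"
  end
end
```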
- # @!attribute [rw] data_sources - # @return [::Array<::Google::Cloud::Bigquery::DataTransfer::V1::DataSource>] - # List of supported data sources and their transfer settings. - # @!attribute [r] next_page_token - # @return [::String] - # Output only. The next-pagination token. For multiple-page list results, - # this token can be used as the - # `ListDataSourcesRequest.page_token` - # to request the next page of list results. - class ListDataSourcesResponse - include ::Google::Protobuf::MessageExts - extend ::Google::Protobuf::MessageExts::ClassMethods - end - - # A request to create a data transfer configuration. If new credentials are - # needed for this transfer configuration, authorization info must be provided. - # If authorization info is provided, the transfer configuration will be - # associated with the user id corresponding to the authorization info. - # Otherwise, the transfer configuration will be associated with the calling - # user. - # - # When using a cross project service account for creating a transfer config, - # you must enable cross project service account usage. For more information, - # see [Disable attachment of service accounts to resources in other - # projects](https://cloud.google.com/resource-manager/docs/organization-policy/restricting-service-accounts#disable_cross_project_service_accounts). - # @!attribute [rw] parent - # @return [::String] - # Required. The BigQuery project id where the transfer configuration should - # be created. Must be in the format - # projects/\\{project_id}/locations/\\{location_id} or projects/\\{project_id}. If - # specified location and location of the destination bigquery dataset do not - # match - the request will fail. - # @!attribute [rw] transfer_config - # @return [::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig] - # Required. Data transfer configuration to create. - # @!attribute [rw] authorization_code - # @deprecated This field is deprecated and may be removed in the next major version update. - # @return [::String] - # Deprecated: Authorization code was required when - # `transferConfig.dataSourceId` is 'youtube_channel' but it is no longer used - # in any data sources. Use `version_info` instead. - # - # Optional OAuth2 authorization code to use with this transfer configuration. - # This is required only if `transferConfig.dataSourceId` is 'youtube_channel' - # and new credentials are needed, as indicated by `CheckValidCreds`. In order - # to obtain authorization_code, make a request to the following URL: - #
-          #     https://bigquery.cloud.google.com/datatransfer/oauthz/auth?redirect_uri=urn:ietf:wg:oauth:2.0:oob&response_type=authorization_code&client_id=client_id&scope=data_source_scopes
-          #     
- # * The client_id is the OAuth client_id of the data source as - # returned by ListDataSources method. - # * data_source_scopes are the scopes returned by ListDataSources - # method. - # - # Note that this should not be set when `service_account_name` is used to - # create the transfer config. - # @!attribute [rw] version_info - # @return [::String] - # Optional version info. This parameter replaces `authorization_code` which - # is no longer used in any data sources. This is required only if - # `transferConfig.dataSourceId` is 'youtube_channel' *or* new credentials - # are needed, as indicated by `CheckValidCreds`. In order to obtain version - # info, make a request to the following URL: - #
-          #     https://bigquery.cloud.google.com/datatransfer/oauthz/auth?redirect_uri=urn:ietf:wg:oauth:2.0:oob&response_type=version_info&client_id=client_id&scope=data_source_scopes
-          #     
- # * The client_id is the OAuth client_id of the data source as - # returned by ListDataSources method. - # * data_source_scopes are the scopes returned by ListDataSources - # method. - # - # Note that this should not be set when `service_account_name` is used to - # create the transfer config. - # @!attribute [rw] service_account_name - # @return [::String] - # Optional service account email. If this field is set, the transfer config - # will be created with this service account's credentials. It requires that - # the requesting user calling this API has permissions to act as this service - # account. - # - # Note that not all data sources support service account credentials when - # creating a transfer config. For the latest list of data sources, read about - # [using service - # accounts](https://cloud.google.com/bigquery-transfer/docs/use-service-accounts). - class CreateTransferConfigRequest - include ::Google::Protobuf::MessageExts - extend ::Google::Protobuf::MessageExts::ClassMethods - end - - # A request to update a transfer configuration. To update the user id of the - # transfer configuration, authorization info needs to be provided. - # - # When using a cross project service account for updating a transfer config, - # you must enable cross project service account usage. For more information, - # see [Disable attachment of service accounts to resources in other - # projects](https://cloud.google.com/resource-manager/docs/organization-policy/restricting-service-accounts#disable_cross_project_service_accounts). - # @!attribute [rw] transfer_config - # @return [::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig] - # Required. Data transfer configuration to create. - # @!attribute [rw] authorization_code - # @deprecated This field is deprecated and may be removed in the next major version update. - # @return [::String] - # Deprecated: Authorization code was required when - # `transferConfig.dataSourceId` is 'youtube_channel' but it is no longer used - # in any data sources. Use `version_info` instead. - # - # Optional OAuth2 authorization code to use with this transfer configuration. - # This is required only if `transferConfig.dataSourceId` is 'youtube_channel' - # and new credentials are needed, as indicated by `CheckValidCreds`. In order - # to obtain authorization_code, make a request to the following URL: - #
-          #     https://bigquery.cloud.google.com/datatransfer/oauthz/auth?redirect_uri=urn:ietf:wg:oauth:2.0:oob&response_type=authorization_code&client_id=client_id&scope=data_source_scopes
-          #     
- # * The client_id is the OAuth client_id of the data source as - # returned by ListDataSources method. - # * data_source_scopes are the scopes returned by ListDataSources - # method. - # - # Note that this should not be set when `service_account_name` is used to - # update the transfer config. - # @!attribute [rw] update_mask - # @return [::Google::Protobuf::FieldMask] - # Required. Required list of fields to be updated in this request. - # @!attribute [rw] version_info - # @return [::String] - # Optional version info. This parameter replaces `authorization_code` which - # is no longer used in any data sources. This is required only if - # `transferConfig.dataSourceId` is 'youtube_channel' *or* new credentials - # are needed, as indicated by `CheckValidCreds`. In order to obtain version - # info, make a request to the following URL: - #
-          #     https://bigquery.cloud.google.com/datatransfer/oauthz/auth?redirect_uri=urn:ietf:wg:oauth:2.0:oob&response_type=version_info&client_id=client_id&scope=data_source_scopes
-          #     
- # * The client_id is the OAuth client_id of the data source as - # returned by ListDataSources method. - # * data_source_scopes are the scopes returned by ListDataSources - # method. - # - # Note that this should not be set when `service_account_name` is used to - # update the transfer config. - # @!attribute [rw] service_account_name - # @return [::String] - # Optional service account email. If this field is set, the transfer config - # will be created with this service account's credentials. It requires that - # the requesting user calling this API has permissions to act as this service - # account. - # - # Note that not all data sources support service account credentials when - # creating a transfer config. For the latest list of data sources, read about - # [using service - # accounts](https://cloud.google.com/bigquery-transfer/docs/use-service-accounts). - class UpdateTransferConfigRequest - include ::Google::Protobuf::MessageExts - extend ::Google::Protobuf::MessageExts::ClassMethods - end - - # A request to get data transfer information. - # @!attribute [rw] name - # @return [::String] - # Required. The field will contain name of the resource requested, for - # example: `projects/{project_id}/transferConfigs/{config_id}` or - # `projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}` - class GetTransferConfigRequest - include ::Google::Protobuf::MessageExts - extend ::Google::Protobuf::MessageExts::ClassMethods - end - - # A request to delete data transfer information. All associated transfer runs - # and log messages will be deleted as well. - # @!attribute [rw] name - # @return [::String] - # Required. The field will contain name of the resource requested, for - # example: `projects/{project_id}/transferConfigs/{config_id}` or - # `projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}` - class DeleteTransferConfigRequest - include ::Google::Protobuf::MessageExts - extend ::Google::Protobuf::MessageExts::ClassMethods - end - - # A request to get data transfer run information. - # @!attribute [rw] name - # @return [::String] - # Required. The field will contain name of the resource requested, for - # example: `projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}` - # or - # `projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}/runs/{run_id}` - class GetTransferRunRequest - include ::Google::Protobuf::MessageExts - extend ::Google::Protobuf::MessageExts::ClassMethods - end - - # A request to delete data transfer run information. - # @!attribute [rw] name - # @return [::String] - # Required. The field will contain name of the resource requested, for - # example: `projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}` - # or - # `projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}/runs/{run_id}` - class DeleteTransferRunRequest - include ::Google::Protobuf::MessageExts - extend ::Google::Protobuf::MessageExts::ClassMethods - end - - # A request to list data transfers configured for a BigQuery project. - # @!attribute [rw] parent - # @return [::String] - # Required. The BigQuery project id for which transfer configs - # should be returned: `projects/{project_id}` or - # `projects/{project_id}/locations/{location_id}` - # @!attribute [rw] data_source_ids - # @return [::Array<::String>] - # When specified, only configurations of requested data sources are returned. 
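Before the paging fields below, here is a hedged sketch tying the `CreateTransferConfigRequest` documented above to a concrete call. The dataset, query, and schedule values are placeholders, and `Google::Protobuf::Struct.from_hash` (a well-known-types helper in the google-protobuf gem) is assumed for building the `params` struct; treat this as a minimal sketch, not a definitive recipe.

```ruby
require "google/cloud/bigquery/data_transfer/v1"
require "google/protobuf/well_known_types" # for Struct.from_hash

client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new

# All identifiers below are placeholders.
transfer_config = Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig.new(
  destination_dataset_id: "my_dataset",
  display_name: "daily-copy",
  data_source_id: "scheduled_query",
  params: Google::Protobuf::Struct.from_hash("query" => "SELECT 1 AS x"),
  schedule: "every 24 hours"
)

# `service_account_name` is optional; when set, the config runs with that
# service account's credentials instead of the calling user's.
config = client.create_transfer_config(
  parent: "projects/my-project/locations/us",
  transfer_config: transfer_config
)
puts config.name
```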
- # @!attribute [rw] page_token
- # @return [::String]
- # Pagination token, which can be used to request a specific page
- # of `ListTransfersRequest` list results. For multiple-page
- # results, `ListTransfersResponse` outputs
- # a `next_page` token, which can be used as the
- # `page_token` value to request the next page of list results.
- # @!attribute [rw] page_size
- # @return [::Integer]
- # Page size. The default page size is the maximum value of 1000 results.
- class ListTransferConfigsRequest
- include ::Google::Protobuf::MessageExts
- extend ::Google::Protobuf::MessageExts::ClassMethods
- end
-
- # The returned list of pipelines in the project.
- # @!attribute [r] transfer_configs
- # @return [::Array<::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig>]
- # Output only. The stored pipeline transfer configurations.
- # @!attribute [r] next_page_token
- # @return [::String]
- # Output only. The next-pagination token. For multiple-page list results,
- # this token can be used as the
- # `ListTransferConfigsRequest.page_token`
- # to request the next page of list results.
- class ListTransferConfigsResponse
- include ::Google::Protobuf::MessageExts
- extend ::Google::Protobuf::MessageExts::ClassMethods
- end
-
- # A request to list data transfer runs.
- # @!attribute [rw] parent
- # @return [::String]
- # Required. Name of the transfer configuration for which transfer runs
- # should be retrieved. The format of the transfer configuration resource
- # name is:
- # `projects/{project_id}/transferConfigs/{config_id}` or
- # `projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}`.
- # @!attribute [rw] states
- # @return [::Array<::Google::Cloud::Bigquery::DataTransfer::V1::TransferState>]
- # When specified, only transfer runs with requested states are returned.
- # @!attribute [rw] page_token
- # @return [::String]
- # Pagination token, which can be used to request a specific page
- # of `ListTransferRunsRequest` list results. For multiple-page
- # results, `ListTransferRunsResponse` outputs
- # a `next_page` token, which can be used as the
- # `page_token` value to request the next page of list results.
- # @!attribute [rw] page_size
- # @return [::Integer]
- # Page size. The default page size is the maximum value of 1000 results.
- # @!attribute [rw] run_attempt
- # @return [::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferRunsRequest::RunAttempt]
- # Indicates how run attempts are to be pulled.
- class ListTransferRunsRequest
- include ::Google::Protobuf::MessageExts
- extend ::Google::Protobuf::MessageExts::ClassMethods
-
- # Represents which runs should be pulled.
- module RunAttempt
- # All runs should be returned.
- RUN_ATTEMPT_UNSPECIFIED = 0
-
- # Only the latest run per day should be returned.
- LATEST = 1
- end
- end
-
- # The returned list of pipelines in the project.
- # @!attribute [r] transfer_runs
- # @return [::Array<::Google::Cloud::Bigquery::DataTransfer::V1::TransferRun>]
- # Output only. The stored pipeline transfer runs.
- # @!attribute [r] next_page_token
- # @return [::String]
- # Output only. The next-pagination token. For multiple-page list results,
- # this token can be used as the
- # `ListTransferRunsRequest.page_token`
- # to request the next page of list results.
- class ListTransferRunsResponse
- include ::Google::Protobuf::MessageExts
- extend ::Google::Protobuf::MessageExts::ClassMethods
- end
-
- # A request to get user-facing log messages associated with a data transfer
- # run.
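To make the run-listing and log-listing messages concrete, here is a minimal sketch that lists the latest failed run per day and then pulls its error logs. The config name is a placeholder; the enum symbols (`:FAILED`, `:LATEST`, `:ERROR`) are coerced by the generated client, which is an assumption about standard GAPIC behavior rather than anything specific to this patch.

```ruby
require "google/cloud/bigquery/data_transfer/v1"

client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new
config_name = "projects/my-project/locations/us/transferConfigs/1234" # placeholder

# RunAttempt::LATEST returns only the latest run per day.
runs = client.list_transfer_runs parent: config_name,
                                 states: [:FAILED],
                                 run_attempt: :LATEST

runs.each do |run|
  puts "#{run.name}: #{run.state}"
  # message_types defaults to INFO, WARNING and ERROR when unset;
  # restrict to ERROR here.
  client.list_transfer_logs(parent: run.name, message_types: [:ERROR]).each do |msg|
    puts "  [#{msg.severity}] #{msg.message_text}"
  end
end
```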
- # @!attribute [rw] parent - # @return [::String] - # Required. Transfer run name in the form: - # `projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}` or - # `projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}/runs/{run_id}` - # @!attribute [rw] page_token - # @return [::String] - # Pagination token, which can be used to request a specific page - # of `ListTransferLogsRequest` list results. For multiple-page - # results, `ListTransferLogsResponse` outputs - # a `next_page` token, which can be used as the - # `page_token` value to request the next page of list results. - # @!attribute [rw] page_size - # @return [::Integer] - # Page size. The default page size is the maximum value of 1000 results. - # @!attribute [rw] message_types - # @return [::Array<::Google::Cloud::Bigquery::DataTransfer::V1::TransferMessage::MessageSeverity>] - # Message types to return. If not populated - INFO, WARNING and ERROR - # messages are returned. - class ListTransferLogsRequest - include ::Google::Protobuf::MessageExts - extend ::Google::Protobuf::MessageExts::ClassMethods - end - - # The returned list transfer run messages. - # @!attribute [r] transfer_messages - # @return [::Array<::Google::Cloud::Bigquery::DataTransfer::V1::TransferMessage>] - # Output only. The stored pipeline transfer messages. - # @!attribute [r] next_page_token - # @return [::String] - # Output only. The next-pagination token. For multiple-page list results, - # this token can be used as the - # `GetTransferRunLogRequest.page_token` - # to request the next page of list results. - class ListTransferLogsResponse - include ::Google::Protobuf::MessageExts - extend ::Google::Protobuf::MessageExts::ClassMethods - end - - # A request to determine whether the user has valid credentials. This method - # is used to limit the number of OAuth popups in the user interface. The - # user id is inferred from the API call context. - # If the data source has the Google+ authorization type, this method - # returns false, as it cannot be determined whether the credentials are - # already valid merely based on the user id. - # @!attribute [rw] name - # @return [::String] - # Required. The data source in the form: - # `projects/{project_id}/dataSources/{data_source_id}` or - # `projects/{project_id}/locations/{location_id}/dataSources/{data_source_id}`. - class CheckValidCredsRequest - include ::Google::Protobuf::MessageExts - extend ::Google::Protobuf::MessageExts::ClassMethods - end - - # A response indicating whether the credentials exist and are valid. - # @!attribute [rw] has_valid_creds - # @return [::Boolean] - # If set to `true`, the credentials exist and are valid. - class CheckValidCredsResponse - include ::Google::Protobuf::MessageExts - extend ::Google::Protobuf::MessageExts::ClassMethods - end - - # A request to schedule transfer runs for a time range. - # @!attribute [rw] parent - # @return [::String] - # Required. Transfer configuration name in the form: - # `projects/{project_id}/transferConfigs/{config_id}` or - # `projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}`. - # @!attribute [rw] start_time - # @return [::Google::Protobuf::Timestamp] - # Required. Start time of the range of transfer runs. For example, - # `"2017-05-25T00:00:00+00:00"`. - # @!attribute [rw] end_time - # @return [::Google::Protobuf::Timestamp] - # Required. End time of the range of transfer runs. For example, - # `"2017-05-30T00:00:00+00:00"`. 
- class ScheduleTransferRunsRequest - include ::Google::Protobuf::MessageExts - extend ::Google::Protobuf::MessageExts::ClassMethods - end - - # A response to schedule transfer runs for a time range. - # @!attribute [rw] runs - # @return [::Array<::Google::Cloud::Bigquery::DataTransfer::V1::TransferRun>] - # The transfer runs that were scheduled. - class ScheduleTransferRunsResponse - include ::Google::Protobuf::MessageExts - extend ::Google::Protobuf::MessageExts::ClassMethods - end - - # A request to start manual transfer runs. - # @!attribute [rw] parent - # @return [::String] - # Required. Transfer configuration name in the form: - # `projects/{project_id}/transferConfigs/{config_id}` or - # `projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}`. - # @!attribute [rw] requested_time_range - # @return [::Google::Cloud::Bigquery::DataTransfer::V1::StartManualTransferRunsRequest::TimeRange] - # A time_range start and end timestamp for historical data files or reports - # that are scheduled to be transferred by the scheduled transfer run. - # requested_time_range must be a past time and cannot include future time - # values. - # @!attribute [rw] requested_run_time - # @return [::Google::Protobuf::Timestamp] - # A run_time timestamp for historical data files or reports - # that are scheduled to be transferred by the scheduled transfer run. - # requested_run_time must be a past time and cannot include future time - # values. - class StartManualTransferRunsRequest - include ::Google::Protobuf::MessageExts - extend ::Google::Protobuf::MessageExts::ClassMethods - - # A specification for a time range, this will request transfer runs with - # run_time between start_time (inclusive) and end_time (exclusive). - # @!attribute [rw] start_time - # @return [::Google::Protobuf::Timestamp] - # Start time of the range of transfer runs. For example, - # `"2017-05-25T00:00:00+00:00"`. The start_time must be strictly less than - # the end_time. Creates transfer runs where run_time is in the range - # between start_time (inclusive) and end_time (exclusive). - # @!attribute [rw] end_time - # @return [::Google::Protobuf::Timestamp] - # End time of the range of transfer runs. For example, - # `"2017-05-30T00:00:00+00:00"`. The end_time must not be in the future. - # Creates transfer runs where run_time is in the range between start_time - # (inclusive) and end_time (exclusive). - class TimeRange - include ::Google::Protobuf::MessageExts - extend ::Google::Protobuf::MessageExts::ClassMethods - end - end - - # A response to start manual transfer runs. - # @!attribute [rw] runs - # @return [::Array<::Google::Cloud::Bigquery::DataTransfer::V1::TransferRun>] - # The transfer runs that were created. - class StartManualTransferRunsResponse - include ::Google::Protobuf::MessageExts - extend ::Google::Protobuf::MessageExts::ClassMethods - end - - # A request to enroll a set of data sources so they are visible in the - # BigQuery UI's `Transfer` tab. - # @!attribute [rw] name - # @return [::String] - # Required. The name of the project resource in the form: - # `projects/{project_id}` - # @!attribute [rw] data_source_ids - # @return [::Array<::String>] - # Data sources that are enrolled. It is required to provide at least one - # data source id. - class EnrollDataSourcesRequest - include ::Google::Protobuf::MessageExts - extend ::Google::Protobuf::MessageExts::ClassMethods - end - - # A request to unenroll a set of data sources so they are no longer visible in - # the BigQuery UI's `Transfer` tab. 
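The manual-run request above carries a oneof-style choice between `requested_time_range` and `requested_run_time`. A minimal backfill sketch using the time-range branch follows; the config name and dates are placeholders, and the inclusive/exclusive semantics come from the `TimeRange` docs above.

```ruby
require "google/cloud/bigquery/data_transfer/v1"

client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new
parent = "projects/my-project/locations/us/transferConfigs/1234" # placeholder

# Backfill a past window: start is inclusive, end is exclusive, and both
# must be in the past.
time_range = Google::Cloud::Bigquery::DataTransfer::V1::StartManualTransferRunsRequest::TimeRange.new(
  start_time: Google::Protobuf::Timestamp.new(seconds: Time.utc(2024, 9, 1).to_i),
  end_time:   Google::Protobuf::Timestamp.new(seconds: Time.utc(2024, 9, 5).to_i)
)

response = client.start_manual_transfer_runs(
  parent: parent,
  requested_time_range: time_range
)
response.runs.each { |run| puts run.name }
```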
- # @!attribute [rw] name
- # @return [::String]
- # Required. The name of the project resource in the form:
- # `projects/{project_id}`
- # @!attribute [rw] data_source_ids
- # @return [::Array<::String>]
- # Data sources that are unenrolled. It is required to provide at least one
- # data source id.
- class UnenrollDataSourcesRequest
- include ::Google::Protobuf::MessageExts
- extend ::Google::Protobuf::MessageExts::ClassMethods
- end
- end
- end
- end
- end
-end
diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/cloud/bigquery/datatransfer/v1/transfer.rb b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/cloud/bigquery/datatransfer/v1/transfer.rb
deleted file mode 100644
index fe536677e3a6..000000000000
--- a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/cloud/bigquery/datatransfer/v1/transfer.rb
+++ /dev/null
@@ -1,405 +0,0 @@
-# frozen_string_literal: true
-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     https://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Auto-generated by gapic-generator-ruby. DO NOT EDIT!
-
-
-module Google
- module Cloud
- module Bigquery
- module DataTransfer
- module V1
- # Represents preferences for sending email notifications for transfer run
- # events.
- # @!attribute [rw] enable_failure_email
- # @return [::Boolean]
- # If true, email notifications will be sent on transfer run failures.
- class EmailPreferences
- include ::Google::Protobuf::MessageExts
- extend ::Google::Protobuf::MessageExts::ClassMethods
- end
-
- # Options customizing the data transfer schedule.
- # @!attribute [rw] disable_auto_scheduling
- # @return [::Boolean]
- # If true, automatic scheduling of data transfer runs for this configuration
- # will be disabled. The runs can be started on an ad-hoc basis using the
- # StartManualTransferRuns API. When automatic scheduling is disabled, the
- # TransferConfig.schedule field will be ignored.
- # @!attribute [rw] start_time
- # @return [::Google::Protobuf::Timestamp]
- # Specifies time to start scheduling transfer runs. The first run will be
- # scheduled at or after the start time according to a recurrence pattern
- # defined in the schedule string. The start time can be changed at any
- # moment. The time when a data transfer can be triggered manually is not
- # limited by this option.
- # @!attribute [rw] end_time
- # @return [::Google::Protobuf::Timestamp]
- # Defines time to stop scheduling transfer runs. A transfer run cannot be
- # scheduled at or after the end time. The end time can be changed at any
- # moment. The time when a data transfer can be triggered manually is not
- # limited by this option.
- class ScheduleOptions
- include ::Google::Protobuf::MessageExts
- extend ::Google::Protobuf::MessageExts::ClassMethods
- end
-
- # V2 options customizing different types of data transfer schedule.
- # This field supports existing time-based and manual transfer schedules, and
- # also supports event-driven transfer schedules.
- # ScheduleOptionsV2 cannot be used together with ScheduleOptions/Schedule.
- # @!attribute [rw] time_based_schedule
- # @return [::Google::Cloud::Bigquery::DataTransfer::V1::TimeBasedSchedule]
- # Time-based transfer schedule options. This is the default schedule
- # option.
- # @!attribute [rw] manual_schedule
- # @return [::Google::Cloud::Bigquery::DataTransfer::V1::ManualSchedule]
- # Manual transfer schedule. If set, the transfer run will not be
- # auto-scheduled by the system, unless the client invokes
- # StartManualTransferRuns. This is equivalent to
- # disable_auto_scheduling = true.
- # @!attribute [rw] event_driven_schedule
- # @return [::Google::Cloud::Bigquery::DataTransfer::V1::EventDrivenSchedule]
- # Event-driven transfer schedule options. If set, the transfer will be
- # scheduled upon event arrival.
- class ScheduleOptionsV2
- include ::Google::Protobuf::MessageExts
- extend ::Google::Protobuf::MessageExts::ClassMethods
- end
-
- # Options customizing the time-based transfer schedule.
- # Options are migrated from the original ScheduleOptions message.
- # @!attribute [rw] schedule
- # @return [::String]
- # Data transfer schedule.
- # If the data source does not support a custom schedule, this should be
- # empty. If it is empty, the default value for the data source will be used.
- # The specified times are in UTC.
- # Examples of valid formats:
- # `1st,3rd monday of month 15:30`,
- # `every wed,fri of jan,jun 13:15`, and
- # `first sunday of quarter 00:00`.
- # See more explanation about the format here:
- # https://cloud.google.com/appengine/docs/flexible/python/scheduling-jobs-with-cron-yaml#the_schedule_format
- #
- # NOTE: The minimum interval time between recurring transfers depends on the
- # data source; refer to the documentation for your data source.
- # @!attribute [rw] start_time
- # @return [::Google::Protobuf::Timestamp]
- # Specifies time to start scheduling transfer runs. The first run will be
- # scheduled at or after the start time according to a recurrence pattern
- # defined in the schedule string. The start time can be changed at any
- # moment.
- # @!attribute [rw] end_time
- # @return [::Google::Protobuf::Timestamp]
- # Defines time to stop scheduling transfer runs. A transfer run cannot be
- # scheduled at or after the end time. The end time can be changed at any
- # moment.
- class TimeBasedSchedule
- include ::Google::Protobuf::MessageExts
- extend ::Google::Protobuf::MessageExts::ClassMethods
- end
-
- # Options customizing the manual transfer schedule.
- class ManualSchedule
- include ::Google::Protobuf::MessageExts
- extend ::Google::Protobuf::MessageExts::ClassMethods
- end
-
- # Options customizing the event-driven transfer schedule.
- # @!attribute [rw] pubsub_subscription
- # @return [::String]
- # Pub/Sub subscription name used to receive events.
- # Only the Google Cloud Storage data source supports this option.
- # Format: projects/\\{project}/subscriptions/\\{subscription}
- class EventDrivenSchedule
- include ::Google::Protobuf::MessageExts
- extend ::Google::Protobuf::MessageExts::ClassMethods
- end
-
- # Information about a user.
- # @!attribute [rw] email
- # @return [::String]
- # E-mail address of the user.
- class UserInfo
- include ::Google::Protobuf::MessageExts
- extend ::Google::Protobuf::MessageExts::ClassMethods
- end
-
- # Represents a data transfer configuration. A transfer configuration
- # contains all metadata needed to perform a data transfer. For example,
- # `destination_dataset_id` specifies where data should be stored.
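Since `scheduleOptionsV2` is one of the two fields this PR introduces, a brief sketch may help before the `TransferConfig` field documentation continues below. It builds a config using the event-driven branch of the oneof; all names are placeholders, and per the docs above the Pub/Sub subscription must belong to a Google Cloud Storage transfer.

```ruby
require "google/cloud/bigquery/data_transfer/v1"

v1 = Google::Cloud::Bigquery::DataTransfer::V1

# Event-driven scheduling: runs are triggered by Pub/Sub events rather than
# a cron-style schedule. Set exactly one branch of ScheduleOptionsV2.
schedule_options_v2 = v1::ScheduleOptionsV2.new(
  event_driven_schedule: v1::EventDrivenSchedule.new(
    pubsub_subscription: "projects/my-project/subscriptions/my-subscription"
  )
)

transfer_config = v1::TransferConfig.new(
  display_name: "gcs-event-driven-load",   # placeholder
  data_source_id: "google_cloud_storage",  # the only source supporting this option
  destination_dataset_id: "my_dataset",    # placeholder
  schedule_options_v2: schedule_options_v2
)
```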
- # When a new transfer configuration is created, the specified - # `destination_dataset_id` is created when needed and shared with the - # appropriate data source service account. - # @!attribute [rw] name - # @return [::String] - # Identifier. The resource name of the transfer config. - # Transfer config names have the form either - # `projects/{project_id}/locations/{region}/transferConfigs/{config_id}` or - # `projects/{project_id}/transferConfigs/{config_id}`, - # where `config_id` is usually a UUID, even though it is not - # guaranteed or required. The name is ignored when creating a transfer - # config. - # @!attribute [rw] destination_dataset_id - # @return [::String] - # The BigQuery target dataset id. - # @!attribute [rw] display_name - # @return [::String] - # User specified display name for the data transfer. - # @!attribute [rw] data_source_id - # @return [::String] - # Data source ID. This cannot be changed once data transfer is created. The - # full list of available data source IDs can be returned through an API call: - # https://cloud.google.com/bigquery-transfer/docs/reference/datatransfer/rest/v1/projects.locations.dataSources/list - # @!attribute [rw] params - # @return [::Google::Protobuf::Struct] - # Parameters specific to each data source. For more information see the - # bq tab in the 'Setting up a data transfer' section for each data source. - # For example the parameters for Cloud Storage transfers are listed here: - # https://cloud.google.com/bigquery-transfer/docs/cloud-storage-transfer#bq - # @!attribute [rw] schedule - # @return [::String] - # Data transfer schedule. - # If the data source does not support a custom schedule, this should be - # empty. If it is empty, the default value for the data source will be used. - # The specified times are in UTC. - # Examples of valid format: - # `1st,3rd monday of month 15:30`, - # `every wed,fri of jan,jun 13:15`, and - # `first sunday of quarter 00:00`. - # See more explanation about the format here: - # https://cloud.google.com/appengine/docs/flexible/python/scheduling-jobs-with-cron-yaml#the_schedule_format - # - # NOTE: The minimum interval time between recurring transfers depends on the - # data source; refer to the documentation for your data source. - # @!attribute [rw] schedule_options - # @return [::Google::Cloud::Bigquery::DataTransfer::V1::ScheduleOptions] - # Options customizing the data transfer schedule. - # @!attribute [rw] schedule_options_v2 - # @return [::Google::Cloud::Bigquery::DataTransfer::V1::ScheduleOptionsV2] - # Options customizing different types of data transfer schedule. - # This field replaces "schedule" and "schedule_options" fields. - # ScheduleOptionsV2 cannot be used together with ScheduleOptions/Schedule. - # @!attribute [rw] data_refresh_window_days - # @return [::Integer] - # The number of days to look back to automatically refresh the data. - # For example, if `data_refresh_window_days = 10`, then every day - # BigQuery reingests data for [today-10, today-1], rather than ingesting data - # for just [today-1]. - # Only valid if the data source supports the feature. Set the value to 0 - # to use the default value. - # @!attribute [rw] disabled - # @return [::Boolean] - # Is this config disabled. When set to true, no runs will be scheduled for - # this transfer config. - # @!attribute [r] update_time - # @return [::Google::Protobuf::Timestamp] - # Output only. Data transfer modification time. Ignored by server on input. 
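A brief aside before the remaining `TransferConfig` fields: mutating an existing config goes through `update_transfer_config` with a `FieldMask` (documented near the end of this patch) restricting which fields are written. A minimal sketch, with the config name as a placeholder:

```ruby
require "google/cloud/bigquery/data_transfer/v1"

client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new

config = client.get_transfer_config(
  name: "projects/my-project/locations/us/transferConfigs/1234" # placeholder
)

# Disable the config; update_mask limits the write to the named field.
config.disabled = true
updated = client.update_transfer_config(
  transfer_config: config,
  update_mask: Google::Protobuf::FieldMask.new(paths: ["disabled"])
)
puts updated.disabled
```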
- # @!attribute [r] next_run_time
- # @return [::Google::Protobuf::Timestamp]
- # Output only. Next time when data transfer will run.
- # @!attribute [r] state
- # @return [::Google::Cloud::Bigquery::DataTransfer::V1::TransferState]
- # Output only. State of the most recently updated transfer run.
- # @!attribute [rw] user_id
- # @return [::Integer]
- # Deprecated. Unique ID of the user on whose behalf transfer is done.
- # @!attribute [r] dataset_region
- # @return [::String]
- # Output only. Region in which BigQuery dataset is located.
- # @!attribute [rw] notification_pubsub_topic
- # @return [::String]
- # Pub/Sub topic where notifications will be sent after transfer runs
- # associated with this transfer config finish.
- #
- # The format for specifying a pubsub topic is:
- # `projects/{project_id}/topics/{topic_id}`
- # @!attribute [rw] email_preferences
- # @return [::Google::Cloud::Bigquery::DataTransfer::V1::EmailPreferences]
- # Email notifications will be sent according to these preferences
- # to the email address of the user who owns this transfer config.
- # @!attribute [r] owner_info
- # @return [::Google::Cloud::Bigquery::DataTransfer::V1::UserInfo]
- # Output only. Information about the user whose credentials are used to
- # transfer data. Populated only for `transferConfigs.get` requests. In case
- # the user information is not available, this field will not be populated.
- # @!attribute [rw] encryption_configuration
- # @return [::Google::Cloud::Bigquery::DataTransfer::V1::EncryptionConfiguration]
- # The encryption configuration part. Currently, it is only used for the
- # optional KMS key name. The BigQuery service account of your project must be
- # granted permissions to use the key. Read methods will return the key name
- # applied in effect. Write methods will apply the key if it is present, or
- # otherwise try to apply project default keys if it is absent.
- # @!attribute [r] error
- # @return [::Google::Rpc::Status]
- # Output only. Error code with detailed information about the reason for
- # the latest config failure.
- class TransferConfig
- include ::Google::Protobuf::MessageExts
- extend ::Google::Protobuf::MessageExts::ClassMethods
- end
-
- # Represents the encryption configuration for a transfer.
- # @!attribute [rw] kms_key_name
- # @return [::Google::Protobuf::StringValue]
- # The name of the KMS key used for encrypting BigQuery data.
- class EncryptionConfiguration
- include ::Google::Protobuf::MessageExts
- extend ::Google::Protobuf::MessageExts::ClassMethods
- end
-
- # Represents a data transfer run.
- # @!attribute [rw] name
- # @return [::String]
- # Identifier. The resource name of the transfer run.
- # Transfer run names have the form
- # `projects/{project_id}/locations/{location}/transferConfigs/{config_id}/runs/{run_id}`.
- # The name is ignored when creating a transfer run.
- # @!attribute [rw] schedule_time
- # @return [::Google::Protobuf::Timestamp]
- # Minimum time after which a transfer run can be started.
- # @!attribute [rw] run_time
- # @return [::Google::Protobuf::Timestamp]
- # For batch transfer runs, specifies the date and time when the data should
- # be ingested.
- # @!attribute [rw] error_status
- # @return [::Google::Rpc::Status]
- # Status of the transfer run.
- # @!attribute [r] start_time
- # @return [::Google::Protobuf::Timestamp]
- # Output only. Time when transfer run was started.
- # Parameter ignored by server for input requests.
- # @!attribute [r] end_time
- # @return [::Google::Protobuf::Timestamp]
- # Output only.
Time when transfer run ended. - # Parameter ignored by server for input requests. - # @!attribute [r] update_time - # @return [::Google::Protobuf::Timestamp] - # Output only. Last time the data transfer run state was updated. - # @!attribute [r] params - # @return [::Google::Protobuf::Struct] - # Output only. Parameters specific to each data source. For more information - # see the bq tab in the 'Setting up a data transfer' section for each data - # source. For example the parameters for Cloud Storage transfers are listed - # here: - # https://cloud.google.com/bigquery-transfer/docs/cloud-storage-transfer#bq - # @!attribute [r] destination_dataset_id - # @return [::String] - # Output only. The BigQuery target dataset id. - # @!attribute [r] data_source_id - # @return [::String] - # Output only. Data source id. - # @!attribute [rw] state - # @return [::Google::Cloud::Bigquery::DataTransfer::V1::TransferState] - # Data transfer run state. Ignored for input requests. - # @!attribute [rw] user_id - # @return [::Integer] - # Deprecated. Unique ID of the user on whose behalf transfer is done. - # @!attribute [r] schedule - # @return [::String] - # Output only. Describes the schedule of this transfer run if it was - # created as part of a regular schedule. For batch transfer runs that are - # scheduled manually, this is empty. - # NOTE: the system might choose to delay the schedule depending on the - # current load, so `schedule_time` doesn't always match this. - # @!attribute [r] notification_pubsub_topic - # @return [::String] - # Output only. Pub/Sub topic where a notification will be sent after this - # transfer run finishes. - # - # The format for specifying a pubsub topic is: - # `projects/{project_id}/topics/{topic_id}` - # @!attribute [r] email_preferences - # @return [::Google::Cloud::Bigquery::DataTransfer::V1::EmailPreferences] - # Output only. Email notifications will be sent according to these - # preferences to the email address of the user who owns the transfer config - # this run was derived from. - class TransferRun - include ::Google::Protobuf::MessageExts - extend ::Google::Protobuf::MessageExts::ClassMethods - end - - # Represents a user facing message for a particular data transfer run. - # @!attribute [rw] message_time - # @return [::Google::Protobuf::Timestamp] - # Time when message was logged. - # @!attribute [rw] severity - # @return [::Google::Cloud::Bigquery::DataTransfer::V1::TransferMessage::MessageSeverity] - # Message severity. - # @!attribute [rw] message_text - # @return [::String] - # Message text. - class TransferMessage - include ::Google::Protobuf::MessageExts - extend ::Google::Protobuf::MessageExts::ClassMethods - - # Represents data transfer user facing message severity. - module MessageSeverity - # No severity specified. - MESSAGE_SEVERITY_UNSPECIFIED = 0 - - # Informational message. - INFO = 1 - - # Warning message. - WARNING = 2 - - # Error message. - ERROR = 3 - end - end - - # DEPRECATED. Represents data transfer type. - # @deprecated This enum is deprecated and may be removed in the next major version update. - module TransferType - # Invalid or Unknown transfer type placeholder. - TRANSFER_TYPE_UNSPECIFIED = 0 - - # Batch data transfer. - BATCH = 1 - - # Streaming data transfer. Streaming data source currently doesn't - # support multiple transfer configs per project. - STREAMING = 2 - end - - # Represents data transfer run state. - module TransferState - # State placeholder (0). 
- TRANSFER_STATE_UNSPECIFIED = 0 - - # Data transfer is scheduled and is waiting to be picked up by - # data transfer backend (2). - PENDING = 2 - - # Data transfer is in progress (3). - RUNNING = 3 - - # Data transfer completed successfully (4). - SUCCEEDED = 4 - - # Data transfer failed (5). - FAILED = 5 - - # Data transfer is cancelled (6). - CANCELLED = 6 - end - end - end - end - end -end diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/protobuf/any.rb b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/protobuf/any.rb deleted file mode 100644 index fb4d6862eac9..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/protobuf/any.rb +++ /dev/null @@ -1,145 +0,0 @@ -# frozen_string_literal: true - -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Auto-generated by gapic-generator-ruby. DO NOT EDIT! - - -module Google - module Protobuf - # `Any` contains an arbitrary serialized protocol buffer message along with a - # URL that describes the type of the serialized message. - # - # Protobuf library provides support to pack/unpack Any values in the form - # of utility functions or additional generated methods of the Any type. - # - # Example 1: Pack and unpack a message in C++. - # - # Foo foo = ...; - # Any any; - # any.PackFrom(foo); - # ... - # if (any.UnpackTo(&foo)) { - # ... - # } - # - # Example 2: Pack and unpack a message in Java. - # - # Foo foo = ...; - # Any any = Any.pack(foo); - # ... - # if (any.is(Foo.class)) { - # foo = any.unpack(Foo.class); - # } - # // or ... - # if (any.isSameTypeAs(Foo.getDefaultInstance())) { - # foo = any.unpack(Foo.getDefaultInstance()); - # } - # - # Example 3: Pack and unpack a message in Python. - # - # foo = Foo(...) - # any = Any() - # any.Pack(foo) - # ... - # if any.Is(Foo.DESCRIPTOR): - # any.Unpack(foo) - # ... - # - # Example 4: Pack and unpack a message in Go - # - # foo := &pb.Foo{...} - # any, err := anypb.New(foo) - # if err != nil { - # ... - # } - # ... - # foo := &pb.Foo{} - # if err := any.UnmarshalTo(foo); err != nil { - # ... - # } - # - # The pack methods provided by protobuf library will by default use - # 'type.googleapis.com/full.type.name' as the type URL and the unpack - # methods only use the fully qualified type name after the last '/' - # in the type URL, for example "foo.bar.com/x/y.z" will yield type - # name "y.z". - # - # JSON - # ==== - # The JSON representation of an `Any` value uses the regular - # representation of the deserialized, embedded message, with an - # additional field `@type` which contains the type URL. 
Example: - # - # package google.profile; - # message Person { - # string first_name = 1; - # string last_name = 2; - # } - # - # { - # "@type": "type.googleapis.com/google.profile.Person", - # "firstName": , - # "lastName": - # } - # - # If the embedded message type is well-known and has a custom JSON - # representation, that representation will be embedded adding a field - # `value` which holds the custom JSON in addition to the `@type` - # field. Example (for message [google.protobuf.Duration][]): - # - # { - # "@type": "type.googleapis.com/google.protobuf.Duration", - # "value": "1.212s" - # } - # @!attribute [rw] type_url - # @return [::String] - # A URL/resource name that uniquely identifies the type of the serialized - # protocol buffer message. This string must contain at least - # one "/" character. The last segment of the URL's path must represent - # the fully qualified name of the type (as in - # `path/google.protobuf.Duration`). The name should be in a canonical form - # (e.g., leading "." is not accepted). - # - # In practice, teams usually precompile into the binary all types that they - # expect it to use in the context of Any. However, for URLs which use the - # scheme `http`, `https`, or no scheme, one can optionally set up a type - # server that maps type URLs to message definitions as follows: - # - # * If no scheme is provided, `https` is assumed. - # * An HTTP GET on the URL must yield a [google.protobuf.Type][] - # value in binary format, or produce an error. - # * Applications are allowed to cache lookup results based on the - # URL, or have them precompiled into a binary to avoid any - # lookup. Therefore, binary compatibility needs to be preserved - # on changes to types. (Use versioned type names to manage - # breaking changes.) - # - # Note: this functionality is not currently available in the official - # protobuf release, and it is not used for type URLs beginning with - # type.googleapis.com. As of May 2023, there are no widely used type server - # implementations and no plans to implement one. - # - # Schemes other than `http`, `https` (or the empty scheme) might be - # used with implementation specific semantics. - # @!attribute [rw] value - # @return [::String] - # Must be a valid serialized protocol buffer of the above specified type. - class Any - include ::Google::Protobuf::MessageExts - extend ::Google::Protobuf::MessageExts::ClassMethods - end - end -end diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/protobuf/duration.rb b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/protobuf/duration.rb deleted file mode 100644 index b5731a824060..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/protobuf/duration.rb +++ /dev/null @@ -1,98 +0,0 @@ -# frozen_string_literal: true - -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Auto-generated by gapic-generator-ruby. DO NOT EDIT! 
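The `Any`/`Status` plumbing documented above is exactly what surfaces in the new `TransferConfig#error` field this PR adds (and in the existing `TransferRun#error_status`). A hedged sketch of inspecting it, with the config name as a placeholder and the convention that a `Status` code of 0 means OK:

```ruby
require "google/cloud/bigquery/data_transfer/v1"

client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new

config = client.get_transfer_config(
  name: "projects/my-project/locations/us/transferConfigs/1234" # placeholder
)

# `error` is a Google::Rpc::Status; code 0 is OK, anything else is a failure.
if config.error && config.error.code != 0
  puts "latest config failure: #{config.error.message}"
  # Status#details is a repeated Google::Protobuf::Any; the type_url
  # identifies the packed message type, as described above.
  config.error.details.each { |any| puts "  detail type: #{any.type_url}" }
end
```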
- - -module Google - module Protobuf - # A Duration represents a signed, fixed-length span of time represented - # as a count of seconds and fractions of seconds at nanosecond - # resolution. It is independent of any calendar and concepts like "day" - # or "month". It is related to Timestamp in that the difference between - # two Timestamp values is a Duration and it can be added or subtracted - # from a Timestamp. Range is approximately +-10,000 years. - # - # # Examples - # - # Example 1: Compute Duration from two Timestamps in pseudo code. - # - # Timestamp start = ...; - # Timestamp end = ...; - # Duration duration = ...; - # - # duration.seconds = end.seconds - start.seconds; - # duration.nanos = end.nanos - start.nanos; - # - # if (duration.seconds < 0 && duration.nanos > 0) { - # duration.seconds += 1; - # duration.nanos -= 1000000000; - # } else if (duration.seconds > 0 && duration.nanos < 0) { - # duration.seconds -= 1; - # duration.nanos += 1000000000; - # } - # - # Example 2: Compute Timestamp from Timestamp + Duration in pseudo code. - # - # Timestamp start = ...; - # Duration duration = ...; - # Timestamp end = ...; - # - # end.seconds = start.seconds + duration.seconds; - # end.nanos = start.nanos + duration.nanos; - # - # if (end.nanos < 0) { - # end.seconds -= 1; - # end.nanos += 1000000000; - # } else if (end.nanos >= 1000000000) { - # end.seconds += 1; - # end.nanos -= 1000000000; - # } - # - # Example 3: Compute Duration from datetime.timedelta in Python. - # - # td = datetime.timedelta(days=3, minutes=10) - # duration = Duration() - # duration.FromTimedelta(td) - # - # # JSON Mapping - # - # In JSON format, the Duration type is encoded as a string rather than an - # object, where the string ends in the suffix "s" (indicating seconds) and - # is preceded by the number of seconds, with nanoseconds expressed as - # fractional seconds. For example, 3 seconds with 0 nanoseconds should be - # encoded in JSON format as "3s", while 3 seconds and 1 nanosecond should - # be expressed in JSON format as "3.000000001s", and 3 seconds and 1 - # microsecond should be expressed in JSON format as "3.000001s". - # @!attribute [rw] seconds - # @return [::Integer] - # Signed seconds of the span of time. Must be from -315,576,000,000 - # to +315,576,000,000 inclusive. Note: these bounds are computed from: - # 60 sec/min * 60 min/hr * 24 hr/day * 365.25 days/year * 10000 years - # @!attribute [rw] nanos - # @return [::Integer] - # Signed fractions of a second at nanosecond resolution of the span - # of time. Durations less than one second are represented with a 0 - # `seconds` field and a positive or negative `nanos` field. For durations - # of one second or more, a non-zero value for the `nanos` field must be - # of the same sign as the `seconds` field. Must be from -999,999,999 - # to +999,999,999 inclusive. 
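A Ruby counterpart to the Duration examples above, assuming the `google-protobuf` gem (the `to_f` convenience comes from `google/protobuf/well_known_types`):

    require "google/protobuf/well_known_types"

    # 3 seconds and 1 microsecond, i.e. "3.000001s" in the JSON mapping above.
    d = Google::Protobuf::Duration.new(seconds: 3, nanos: 1_000)
    d.to_f  # => 3.000001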
- class Duration - include ::Google::Protobuf::MessageExts - extend ::Google::Protobuf::MessageExts::ClassMethods - end - end -end diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/protobuf/empty.rb b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/protobuf/empty.rb deleted file mode 100644 index 8c6b19d52e3d..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/protobuf/empty.rb +++ /dev/null @@ -1,34 +0,0 @@ -# frozen_string_literal: true - -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Auto-generated by gapic-generator-ruby. DO NOT EDIT! - - -module Google - module Protobuf - # A generic empty message that you can re-use to avoid defining duplicated - # empty messages in your APIs. A typical example is to use it as the request - # or the response type of an API method. For instance: - # - # service Foo { - # rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty); - # } - class Empty - include ::Google::Protobuf::MessageExts - extend ::Google::Protobuf::MessageExts::ClassMethods - end - end -end diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/protobuf/field_mask.rb b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/protobuf/field_mask.rb deleted file mode 100644 index 8e7abcf8f052..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/protobuf/field_mask.rb +++ /dev/null @@ -1,229 +0,0 @@ -# frozen_string_literal: true - -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Auto-generated by gapic-generator-ruby. DO NOT EDIT! - - -module Google - module Protobuf - # `FieldMask` represents a set of symbolic field paths, for example: - # - # paths: "f.a" - # paths: "f.b.d" - # - # Here `f` represents a field in some root message, `a` and `b` - # fields in the message found in `f`, and `d` a field found in the - # message in `f.b`. - # - # Field masks are used to specify a subset of fields that should be - # returned by a get operation or modified by an update operation. - # Field masks also have a custom JSON encoding (see below). - # - # # Field Masks in Projections - # - # When used in the context of a projection, a response message or - # sub-message is filtered by the API to only contain those fields as - # specified in the mask. 
For example, if the mask in the previous - # example is applied to a response message as follows: - # - # f { - # a : 22 - # b { - # d : 1 - # x : 2 - # } - # y : 13 - # } - # z: 8 - # - # The result will not contain specific values for fields x,y and z - # (their value will be set to the default, and omitted in proto text - # output): - # - # - # f { - # a : 22 - # b { - # d : 1 - # } - # } - # - # A repeated field is not allowed except at the last position of a - # paths string. - # - # If a FieldMask object is not present in a get operation, the - # operation applies to all fields (as if a FieldMask of all fields - # had been specified). - # - # Note that a field mask does not necessarily apply to the - # top-level response message. In case of a REST get operation, the - # field mask applies directly to the response, but in case of a REST - # list operation, the mask instead applies to each individual message - # in the returned resource list. In case of a REST custom method, - # other definitions may be used. Where the mask applies will be - # clearly documented together with its declaration in the API. In - # any case, the effect on the returned resource/resources is required - # behavior for APIs. - # - # # Field Masks in Update Operations - # - # A field mask in update operations specifies which fields of the - # targeted resource are going to be updated. The API is required - # to only change the values of the fields as specified in the mask - # and leave the others untouched. If a resource is passed in to - # describe the updated values, the API ignores the values of all - # fields not covered by the mask. - # - # If a repeated field is specified for an update operation, new values will - # be appended to the existing repeated field in the target resource. Note that - # a repeated field is only allowed in the last position of a `paths` string. - # - # If a sub-message is specified in the last position of the field mask for an - # update operation, then new value will be merged into the existing sub-message - # in the target resource. - # - # For example, given the target message: - # - # f { - # b { - # d: 1 - # x: 2 - # } - # c: [1] - # } - # - # And an update message: - # - # f { - # b { - # d: 10 - # } - # c: [2] - # } - # - # then if the field mask is: - # - # paths: ["f.b", "f.c"] - # - # then the result will be: - # - # f { - # b { - # d: 10 - # x: 2 - # } - # c: [1, 2] - # } - # - # An implementation may provide options to override this default behavior for - # repeated and message fields. - # - # In order to reset a field's value to the default, the field must - # be in the mask and set to the default value in the provided resource. - # Hence, in order to reset all fields of a resource, provide a default - # instance of the resource and set all fields in the mask, or do - # not provide a mask as described below. - # - # If a field mask is not present on update, the operation applies to - # all fields (as if a field mask of all fields has been specified). - # Note that in the presence of schema evolution, this may mean that - # fields the client does not know and has therefore not filled into - # the request will be reset to their default. If this is unwanted - # behavior, a specific service may require a client to always specify - # a field mask, producing an error if not. - # - # As with get operations, the location of the resource which - # describes the updated values in the request message depends on the - # operation kind. 
In any case, the effect of the field mask is - # required to be honored by the API. - # - # ## Considerations for HTTP REST - # - # The HTTP kind of an update operation which uses a field mask must - # be set to PATCH instead of PUT in order to satisfy HTTP semantics - # (PUT must only be used for full updates). - # - # # JSON Encoding of Field Masks - # - # In JSON, a field mask is encoded as a single string where paths are - # separated by a comma. Fields name in each path are converted - # to/from lower-camel naming conventions. - # - # As an example, consider the following message declarations: - # - # message Profile { - # User user = 1; - # Photo photo = 2; - # } - # message User { - # string display_name = 1; - # string address = 2; - # } - # - # In proto a field mask for `Profile` may look as such: - # - # mask { - # paths: "user.display_name" - # paths: "photo" - # } - # - # In JSON, the same mask is represented as below: - # - # { - # mask: "user.displayName,photo" - # } - # - # # Field Masks and Oneof Fields - # - # Field masks treat fields in oneofs just as regular fields. Consider the - # following message: - # - # message SampleMessage { - # oneof test_oneof { - # string name = 4; - # SubMessage sub_message = 9; - # } - # } - # - # The field mask can be: - # - # mask { - # paths: "name" - # } - # - # Or: - # - # mask { - # paths: "sub_message" - # } - # - # Note that oneof type names ("test_oneof" in this case) cannot be used in - # paths. - # - # ## Field Mask Verification - # - # The implementation of any API method which has a FieldMask type field in the - # request should verify the included field paths, and return an - # `INVALID_ARGUMENT` error if any path is unmappable. - # @!attribute [rw] paths - # @return [::Array<::String>] - # The set of field mask paths. - class FieldMask - include ::Google::Protobuf::MessageExts - extend ::Google::Protobuf::MessageExts::ClassMethods - end - end -end diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/protobuf/struct.rb b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/protobuf/struct.rb deleted file mode 100644 index 9e96368be9d9..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/protobuf/struct.rb +++ /dev/null @@ -1,96 +0,0 @@ -# frozen_string_literal: true - -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Auto-generated by gapic-generator-ruby. DO NOT EDIT! - - -module Google - module Protobuf - # `Struct` represents a structured data value, consisting of fields - # which map to dynamically typed values. In some languages, `Struct` - # might be supported by a native representation. For example, in - # scripting languages like JS a struct is represented as an - # object. The details of that representation are described together - # with the proto support for the language. - # - # The JSON representation for `Struct` is JSON object. 
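A quick Ruby illustration of the `Struct` hash mapping just described, assuming the conversion helpers (`Struct.from_hash`, `Struct#to_h`) added by `google/protobuf/well_known_types`:

    require "google/protobuf/well_known_types"

    s = Google::Protobuf::Struct.from_hash("job" => "backfill", "retries" => 3.0, "dry_run" => false)
    s.fields["job"].string_value  # => "backfill"
    s.to_h                        # => {"job"=>"backfill", "retries"=>3.0, "dry_run"=>false}

And tying the FieldMask file above back to this library: the generated client's update call takes the mask as a keyword argument, so only the fields named in `paths` are written. A minimal sketch, with a hypothetical config name:

    require "google/cloud/bigquery/data_transfer/v1"

    client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new
    config = Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig.new(
      name: "projects/my-project/locations/us/transferConfigs/my-config", # hypothetical resource name
      display_name: "Nightly load"
    )
    client.update_transfer_config transfer_config: config,
                                  update_mask: Google::Protobuf::FieldMask.new(paths: ["display_name"])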
- # @!attribute [rw] fields - # @return [::Google::Protobuf::Map{::String => ::Google::Protobuf::Value}] - # Unordered map of dynamically typed values. - class Struct - include ::Google::Protobuf::MessageExts - extend ::Google::Protobuf::MessageExts::ClassMethods - - # @!attribute [rw] key - # @return [::String] - # @!attribute [rw] value - # @return [::Google::Protobuf::Value] - class FieldsEntry - include ::Google::Protobuf::MessageExts - extend ::Google::Protobuf::MessageExts::ClassMethods - end - end - - # `Value` represents a dynamically typed value which can be either - # null, a number, a string, a boolean, a recursive struct value, or a - # list of values. A producer of value is expected to set one of these - # variants. Absence of any variant indicates an error. - # - # The JSON representation for `Value` is JSON value. - # @!attribute [rw] null_value - # @return [::Google::Protobuf::NullValue] - # Represents a null value. - # @!attribute [rw] number_value - # @return [::Float] - # Represents a double value. - # @!attribute [rw] string_value - # @return [::String] - # Represents a string value. - # @!attribute [rw] bool_value - # @return [::Boolean] - # Represents a boolean value. - # @!attribute [rw] struct_value - # @return [::Google::Protobuf::Struct] - # Represents a structured value. - # @!attribute [rw] list_value - # @return [::Google::Protobuf::ListValue] - # Represents a repeated `Value`. - class Value - include ::Google::Protobuf::MessageExts - extend ::Google::Protobuf::MessageExts::ClassMethods - end - - # `ListValue` is a wrapper around a repeated field of values. - # - # The JSON representation for `ListValue` is JSON array. - # @!attribute [rw] values - # @return [::Array<::Google::Protobuf::Value>] - # Repeated field of dynamically typed values. - class ListValue - include ::Google::Protobuf::MessageExts - extend ::Google::Protobuf::MessageExts::ClassMethods - end - - # `NullValue` is a singleton enumeration to represent the null value for the - # `Value` type union. - # - # The JSON representation for `NullValue` is JSON `null`. - module NullValue - # Null value. - NULL_VALUE = 0 - end - end -end diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/protobuf/timestamp.rb b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/protobuf/timestamp.rb deleted file mode 100644 index 4ac9c4801a3f..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/protobuf/timestamp.rb +++ /dev/null @@ -1,127 +0,0 @@ -# frozen_string_literal: true - -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Auto-generated by gapic-generator-ruby. DO NOT EDIT! - - -module Google - module Protobuf - # A Timestamp represents a point in time independent of any time zone or local - # calendar, encoded as a count of seconds and fractions of seconds at - # nanosecond resolution. 
The count is relative to an epoch at UTC midnight on - # January 1, 1970, in the proleptic Gregorian calendar which extends the - # Gregorian calendar backwards to year one. - # - # All minutes are 60 seconds long. Leap seconds are "smeared" so that no leap - # second table is needed for interpretation, using a [24-hour linear - # smear](https://developers.google.com/time/smear). - # - # The range is from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z. By - # restricting to that range, we ensure that we can convert to and from [RFC - # 3339](https://www.ietf.org/rfc/rfc3339.txt) date strings. - # - # # Examples - # - # Example 1: Compute Timestamp from POSIX `time()`. - # - # Timestamp timestamp; - # timestamp.set_seconds(time(NULL)); - # timestamp.set_nanos(0); - # - # Example 2: Compute Timestamp from POSIX `gettimeofday()`. - # - # struct timeval tv; - # gettimeofday(&tv, NULL); - # - # Timestamp timestamp; - # timestamp.set_seconds(tv.tv_sec); - # timestamp.set_nanos(tv.tv_usec * 1000); - # - # Example 3: Compute Timestamp from Win32 `GetSystemTimeAsFileTime()`. - # - # FILETIME ft; - # GetSystemTimeAsFileTime(&ft); - # UINT64 ticks = (((UINT64)ft.dwHighDateTime) << 32) | ft.dwLowDateTime; - # - # // A Windows tick is 100 nanoseconds. Windows epoch 1601-01-01T00:00:00Z - # // is 11644473600 seconds before Unix epoch 1970-01-01T00:00:00Z. - # Timestamp timestamp; - # timestamp.set_seconds((INT64) ((ticks / 10000000) - 11644473600LL)); - # timestamp.set_nanos((INT32) ((ticks % 10000000) * 100)); - # - # Example 4: Compute Timestamp from Java `System.currentTimeMillis()`. - # - # long millis = System.currentTimeMillis(); - # - # Timestamp timestamp = Timestamp.newBuilder().setSeconds(millis / 1000) - # .setNanos((int) ((millis % 1000) * 1000000)).build(); - # - # Example 5: Compute Timestamp from Java `Instant.now()`. - # - # Instant now = Instant.now(); - # - # Timestamp timestamp = - # Timestamp.newBuilder().setSeconds(now.getEpochSecond()) - # .setNanos(now.getNano()).build(); - # - # Example 6: Compute Timestamp from current time in Python. - # - # timestamp = Timestamp() - # timestamp.GetCurrentTime() - # - # # JSON Mapping - # - # In JSON format, the Timestamp type is encoded as a string in the - # [RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format. That is, the - # format is "\\{year}-\\{month}-\\{day}T\\{hour}:\\{min}:\\{sec}[.\\{frac_sec}]Z" - # where \\{year} is always expressed using four digits while \\{month}, \\{day}, - # \\{hour}, \\{min}, and \\{sec} are zero-padded to two digits each. The fractional - # seconds, which can go up to 9 digits (i.e. up to 1 nanosecond resolution), - # are optional. The "Z" suffix indicates the timezone ("UTC"); the timezone - # is required. A proto3 JSON serializer should always use UTC (as indicated by - # "Z") when printing the Timestamp type and a proto3 JSON parser should be - # able to accept both UTC and other timezones (as indicated by an offset). - # - # For example, "2017-01-15T01:30:15.01Z" encodes 15.01 seconds past - # 01:30 UTC on January 15, 2017. - # - # In JavaScript, one can convert a Date object to this format using the - # standard - # [toISOString()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString) - # method. In Python, a standard `datetime.datetime` object can be converted - # to this format using - # [`strftime`](https://docs.python.org/2/library/time.html#time.strftime) with - # the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. 
Likewise, in Java, one can use - # the Joda Time's [`ISODateTimeFormat.dateTime()`]( - # http://joda-time.sourceforge.net/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime() - # ) to obtain a formatter capable of generating timestamps in this format. - # @!attribute [rw] seconds - # @return [::Integer] - # Represents seconds of UTC time since Unix epoch - # 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to - # 9999-12-31T23:59:59Z inclusive. - # @!attribute [rw] nanos - # @return [::Integer] - # Non-negative fractions of a second at nanosecond resolution. Negative - # second values with fractions must still have non-negative nanos values - # that count forward in time. Must be from 0 to 999,999,999 - # inclusive. - class Timestamp - include ::Google::Protobuf::MessageExts - extend ::Google::Protobuf::MessageExts::ClassMethods - end - end -end diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/protobuf/wrappers.rb b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/protobuf/wrappers.rb deleted file mode 100644 index 5160138862c2..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/protobuf/wrappers.rb +++ /dev/null @@ -1,121 +0,0 @@ -# frozen_string_literal: true - -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Auto-generated by gapic-generator-ruby. DO NOT EDIT! - - -module Google - module Protobuf - # Wrapper message for `double`. - # - # The JSON representation for `DoubleValue` is JSON number. - # @!attribute [rw] value - # @return [::Float] - # The double value. - class DoubleValue - include ::Google::Protobuf::MessageExts - extend ::Google::Protobuf::MessageExts::ClassMethods - end - - # Wrapper message for `float`. - # - # The JSON representation for `FloatValue` is JSON number. - # @!attribute [rw] value - # @return [::Float] - # The float value. - class FloatValue - include ::Google::Protobuf::MessageExts - extend ::Google::Protobuf::MessageExts::ClassMethods - end - - # Wrapper message for `int64`. - # - # The JSON representation for `Int64Value` is JSON string. - # @!attribute [rw] value - # @return [::Integer] - # The int64 value. - class Int64Value - include ::Google::Protobuf::MessageExts - extend ::Google::Protobuf::MessageExts::ClassMethods - end - - # Wrapper message for `uint64`. - # - # The JSON representation for `UInt64Value` is JSON string. - # @!attribute [rw] value - # @return [::Integer] - # The uint64 value. - class UInt64Value - include ::Google::Protobuf::MessageExts - extend ::Google::Protobuf::MessageExts::ClassMethods - end - - # Wrapper message for `int32`. - # - # The JSON representation for `Int32Value` is JSON number. - # @!attribute [rw] value - # @return [::Integer] - # The int32 value. - class Int32Value - include ::Google::Protobuf::MessageExts - extend ::Google::Protobuf::MessageExts::ClassMethods - end - - # Wrapper message for `uint32`. - # - # The JSON representation for `UInt32Value` is JSON number. 
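A Ruby counterpart to the Timestamp examples above, using only the core generated message; `Time#to_i` and `Time#nsec` supply the two fields:

    require "google/protobuf/timestamp_pb"

    now = Time.now
    timestamp = Google::Protobuf::Timestamp.new(seconds: now.to_i, nanos: now.nsec)

(For the wrapper types in this file, note that `Int64Value` and `UInt64Value` map to JSON strings rather than numbers because 64-bit integers exceed the exact range of a JSON/JavaScript double.)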
- # @!attribute [rw] value - # @return [::Integer] - # The uint32 value. - class UInt32Value - include ::Google::Protobuf::MessageExts - extend ::Google::Protobuf::MessageExts::ClassMethods - end - - # Wrapper message for `bool`. - # - # The JSON representation for `BoolValue` is JSON `true` and `false`. - # @!attribute [rw] value - # @return [::Boolean] - # The bool value. - class BoolValue - include ::Google::Protobuf::MessageExts - extend ::Google::Protobuf::MessageExts::ClassMethods - end - - # Wrapper message for `string`. - # - # The JSON representation for `StringValue` is JSON string. - # @!attribute [rw] value - # @return [::String] - # The string value. - class StringValue - include ::Google::Protobuf::MessageExts - extend ::Google::Protobuf::MessageExts::ClassMethods - end - - # Wrapper message for `bytes`. - # - # The JSON representation for `BytesValue` is JSON string. - # @!attribute [rw] value - # @return [::String] - # The bytes value. - class BytesValue - include ::Google::Protobuf::MessageExts - extend ::Google::Protobuf::MessageExts::ClassMethods - end - end -end diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/rpc/status.rb b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/rpc/status.rb deleted file mode 100644 index 09acc69b6125..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/proto_docs/google/rpc/status.rb +++ /dev/null @@ -1,48 +0,0 @@ -# frozen_string_literal: true - -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Auto-generated by gapic-generator-ruby. DO NOT EDIT! - - -module Google - module Rpc - # The `Status` type defines a logical error model that is suitable for - # different programming environments, including REST APIs and RPC APIs. It is - # used by [gRPC](https://github.com/grpc). Each `Status` message contains - # three pieces of data: error code, error message, and error details. - # - # You can find out more about this error model and how to work with it in the - # [API Design Guide](https://cloud.google.com/apis/design/errors). - # @!attribute [rw] code - # @return [::Integer] - # The status code, which should be an enum value of - # [google.rpc.Code][google.rpc.Code]. - # @!attribute [rw] message - # @return [::String] - # A developer-facing error message, which should be in English. Any - # user-facing error message should be localized and sent in the - # {::Google::Rpc::Status#details google.rpc.Status.details} field, or localized - # by the client. - # @!attribute [rw] details - # @return [::Array<::Google::Protobuf::Any>] - # A list of messages that carry the error details. There is a common set of - # message types for APIs to use. 
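Within this library, `google.rpc.Status` surfaces on failed transfer runs as `TransferRun#error_status`. A hedged sketch, with a hypothetical run name:

    require "google/cloud/bigquery/data_transfer/v1"

    client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new
    run = client.get_transfer_run name: "projects/my-project/locations/us/transferConfigs/my-config/runs/123" # hypothetical
    if run.state == :FAILED
      # details is an array of Google::Protobuf::Any messages carrying structured error info.
      puts "#{run.error_status.code}: #{run.error_status.message}"
    end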
- class Status - include ::Google::Protobuf::MessageExts - extend ::Google::Protobuf::MessageExts::ClassMethods - end - end -end diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/Gemfile b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/Gemfile deleted file mode 100644 index 27eda5dbdbd1..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/Gemfile +++ /dev/null @@ -1,32 +0,0 @@ -# frozen_string_literal: true - -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Auto-generated by gapic-generator-ruby. DO NOT EDIT! - -source "https://rubygems.org" - -if ENV["GOOGLE_CLOUD_SAMPLES_TEST"] == "master" - gem "google-cloud-bigquery-data_transfer-v1", path: "../" -else - gem "google-cloud-bigquery-data_transfer-v1" -end - -group :test do - gem "google-style", "~> 1.26.1" - gem "minitest", "~> 5.16" - gem "minitest-focus", "~> 1.1" - gem "minitest-hooks", "~> 1.5" -end diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/check_valid_creds.rb b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/check_valid_creds.rb deleted file mode 100644 index 2f41db133d0e..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/check_valid_creds.rb +++ /dev/null @@ -1,47 +0,0 @@ -# frozen_string_literal: true - -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Auto-generated by gapic-generator-ruby. DO NOT EDIT! - -# [START bigquerydatatransfer_v1_generated_DataTransferService_CheckValidCreds_sync] -require "google/cloud/bigquery/data_transfer/v1" - -## -# Snippet for the check_valid_creds call in the DataTransferService service -# -# This snippet has been automatically generated and should be regarded as a code -# template only. It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in https://cloud.google.com/ruby/docs/reference. -# -# This is an auto-generated example demonstrating basic usage of -# Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client#check_valid_creds. -# -def check_valid_creds - # Create a client object. The client can be reused for multiple calls. - client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new - - # Create a request. 
To set request fields, pass in keyword arguments. - request = Google::Cloud::Bigquery::DataTransfer::V1::CheckValidCredsRequest.new - - # Call the check_valid_creds method. - result = client.check_valid_creds request - - # The returned object is of type Google::Cloud::Bigquery::DataTransfer::V1::CheckValidCredsResponse. - p result -end -# [END bigquerydatatransfer_v1_generated_DataTransferService_CheckValidCreds_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/create_transfer_config.rb b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/create_transfer_config.rb deleted file mode 100644 index 297f5d8bf175..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/create_transfer_config.rb +++ /dev/null @@ -1,47 +0,0 @@ -# frozen_string_literal: true - -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Auto-generated by gapic-generator-ruby. DO NOT EDIT! - -# [START bigquerydatatransfer_v1_generated_DataTransferService_CreateTransferConfig_sync] -require "google/cloud/bigquery/data_transfer/v1" - -## -# Snippet for the create_transfer_config call in the DataTransferService service -# -# This snippet has been automatically generated and should be regarded as a code -# template only. It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in https://cloud.google.com/ruby/docs/reference. -# -# This is an auto-generated example demonstrating basic usage of -# Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client#create_transfer_config. -# -def create_transfer_config - # Create a client object. The client can be reused for multiple calls. - client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new - - # Create a request. To set request fields, pass in keyword arguments. - request = Google::Cloud::Bigquery::DataTransfer::V1::CreateTransferConfigRequest.new - - # Call the create_transfer_config method. - result = client.create_transfer_config request - - # The returned object is of type Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig. 
- p result -end -# [END bigquerydatatransfer_v1_generated_DataTransferService_CreateTransferConfig_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/delete_transfer_config.rb b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/delete_transfer_config.rb deleted file mode 100644 index 1d5c08777839..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/delete_transfer_config.rb +++ /dev/null @@ -1,47 +0,0 @@ -# frozen_string_literal: true - -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Auto-generated by gapic-generator-ruby. DO NOT EDIT! - -# [START bigquerydatatransfer_v1_generated_DataTransferService_DeleteTransferConfig_sync] -require "google/cloud/bigquery/data_transfer/v1" - -## -# Snippet for the delete_transfer_config call in the DataTransferService service -# -# This snippet has been automatically generated and should be regarded as a code -# template only. It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in https://cloud.google.com/ruby/docs/reference. -# -# This is an auto-generated example demonstrating basic usage of -# Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client#delete_transfer_config. -# -def delete_transfer_config - # Create a client object. The client can be reused for multiple calls. - client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new - - # Create a request. To set request fields, pass in keyword arguments. - request = Google::Cloud::Bigquery::DataTransfer::V1::DeleteTransferConfigRequest.new - - # Call the delete_transfer_config method. - result = client.delete_transfer_config request - - # The returned object is of type Google::Protobuf::Empty. - p result -end -# [END bigquerydatatransfer_v1_generated_DataTransferService_DeleteTransferConfig_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/delete_transfer_run.rb b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/delete_transfer_run.rb deleted file mode 100644 index 79209dcf9fe9..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/delete_transfer_run.rb +++ /dev/null @@ -1,47 +0,0 @@ -# frozen_string_literal: true - -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. - -# Auto-generated by gapic-generator-ruby. DO NOT EDIT! - -# [START bigquerydatatransfer_v1_generated_DataTransferService_DeleteTransferRun_sync] -require "google/cloud/bigquery/data_transfer/v1" - -## -# Snippet for the delete_transfer_run call in the DataTransferService service -# -# This snippet has been automatically generated and should be regarded as a code -# template only. It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in https://cloud.google.com/ruby/docs/reference. -# -# This is an auto-generated example demonstrating basic usage of -# Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client#delete_transfer_run. -# -def delete_transfer_run - # Create a client object. The client can be reused for multiple calls. - client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new - - # Create a request. To set request fields, pass in keyword arguments. - request = Google::Cloud::Bigquery::DataTransfer::V1::DeleteTransferRunRequest.new - - # Call the delete_transfer_run method. - result = client.delete_transfer_run request - - # The returned object is of type Google::Protobuf::Empty. - p result -end -# [END bigquerydatatransfer_v1_generated_DataTransferService_DeleteTransferRun_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/enroll_data_sources.rb b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/enroll_data_sources.rb deleted file mode 100644 index 5159c1b9658a..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/enroll_data_sources.rb +++ /dev/null @@ -1,47 +0,0 @@ -# frozen_string_literal: true - -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Auto-generated by gapic-generator-ruby. DO NOT EDIT! - -# [START bigquerydatatransfer_v1_generated_DataTransferService_EnrollDataSources_sync] -require "google/cloud/bigquery/data_transfer/v1" - -## -# Snippet for the enroll_data_sources call in the DataTransferService service -# -# This snippet has been automatically generated and should be regarded as a code -# template only. It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in https://cloud.google.com/ruby/docs/reference. -# -# This is an auto-generated example demonstrating basic usage of -# Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client#enroll_data_sources. -# -def enroll_data_sources - # Create a client object. The client can be reused for multiple calls. 
- client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new - - # Create a request. To set request fields, pass in keyword arguments. - request = Google::Cloud::Bigquery::DataTransfer::V1::EnrollDataSourcesRequest.new - - # Call the enroll_data_sources method. - result = client.enroll_data_sources request - - # The returned object is of type Google::Protobuf::Empty. - p result -end -# [END bigquerydatatransfer_v1_generated_DataTransferService_EnrollDataSources_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/get_data_source.rb b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/get_data_source.rb deleted file mode 100644 index 2eb9bdd308f1..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/get_data_source.rb +++ /dev/null @@ -1,47 +0,0 @@ -# frozen_string_literal: true - -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Auto-generated by gapic-generator-ruby. DO NOT EDIT! - -# [START bigquerydatatransfer_v1_generated_DataTransferService_GetDataSource_sync] -require "google/cloud/bigquery/data_transfer/v1" - -## -# Snippet for the get_data_source call in the DataTransferService service -# -# This snippet has been automatically generated and should be regarded as a code -# template only. It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in https://cloud.google.com/ruby/docs/reference. -# -# This is an auto-generated example demonstrating basic usage of -# Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client#get_data_source. -# -def get_data_source - # Create a client object. The client can be reused for multiple calls. - client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new - - # Create a request. To set request fields, pass in keyword arguments. - request = Google::Cloud::Bigquery::DataTransfer::V1::GetDataSourceRequest.new - - # Call the get_data_source method. - result = client.get_data_source request - - # The returned object is of type Google::Cloud::Bigquery::DataTransfer::V1::DataSource. 
- p result -end -# [END bigquerydatatransfer_v1_generated_DataTransferService_GetDataSource_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/get_transfer_config.rb b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/get_transfer_config.rb deleted file mode 100644 index c8b1dfaaf116..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/get_transfer_config.rb +++ /dev/null @@ -1,47 +0,0 @@ -# frozen_string_literal: true - -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Auto-generated by gapic-generator-ruby. DO NOT EDIT! - -# [START bigquerydatatransfer_v1_generated_DataTransferService_GetTransferConfig_sync] -require "google/cloud/bigquery/data_transfer/v1" - -## -# Snippet for the get_transfer_config call in the DataTransferService service -# -# This snippet has been automatically generated and should be regarded as a code -# template only. It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in https://cloud.google.com/ruby/docs/reference. -# -# This is an auto-generated example demonstrating basic usage of -# Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client#get_transfer_config. -# -def get_transfer_config - # Create a client object. The client can be reused for multiple calls. - client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new - - # Create a request. To set request fields, pass in keyword arguments. - request = Google::Cloud::Bigquery::DataTransfer::V1::GetTransferConfigRequest.new - - # Call the get_transfer_config method. - result = client.get_transfer_config request - - # The returned object is of type Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig. - p result -end -# [END bigquerydatatransfer_v1_generated_DataTransferService_GetTransferConfig_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/get_transfer_run.rb b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/get_transfer_run.rb deleted file mode 100644 index 8e738239a0a7..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/get_transfer_run.rb +++ /dev/null @@ -1,47 +0,0 @@ -# frozen_string_literal: true - -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. - -# Auto-generated by gapic-generator-ruby. DO NOT EDIT! - -# [START bigquerydatatransfer_v1_generated_DataTransferService_GetTransferRun_sync] -require "google/cloud/bigquery/data_transfer/v1" - -## -# Snippet for the get_transfer_run call in the DataTransferService service -# -# This snippet has been automatically generated and should be regarded as a code -# template only. It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in https://cloud.google.com/ruby/docs/reference. -# -# This is an auto-generated example demonstrating basic usage of -# Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client#get_transfer_run. -# -def get_transfer_run - # Create a client object. The client can be reused for multiple calls. - client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new - - # Create a request. To set request fields, pass in keyword arguments. - request = Google::Cloud::Bigquery::DataTransfer::V1::GetTransferRunRequest.new - - # Call the get_transfer_run method. - result = client.get_transfer_run request - - # The returned object is of type Google::Cloud::Bigquery::DataTransfer::V1::TransferRun. - p result -end -# [END bigquerydatatransfer_v1_generated_DataTransferService_GetTransferRun_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/list_data_sources.rb b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/list_data_sources.rb deleted file mode 100644 index c970382cedfa..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/list_data_sources.rb +++ /dev/null @@ -1,51 +0,0 @@ -# frozen_string_literal: true - -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Auto-generated by gapic-generator-ruby. DO NOT EDIT! - -# [START bigquerydatatransfer_v1_generated_DataTransferService_ListDataSources_sync] -require "google/cloud/bigquery/data_transfer/v1" - -## -# Snippet for the list_data_sources call in the DataTransferService service -# -# This snippet has been automatically generated and should be regarded as a code -# template only. It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in https://cloud.google.com/ruby/docs/reference. -# -# This is an auto-generated example demonstrating basic usage of -# Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client#list_data_sources. -# -def list_data_sources - # Create a client object. The client can be reused for multiple calls. 
- client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new - - # Create a request. To set request fields, pass in keyword arguments. - request = Google::Cloud::Bigquery::DataTransfer::V1::ListDataSourcesRequest.new - - # Call the list_data_sources method. - result = client.list_data_sources request - - # The returned object is of type Gapic::PagedEnumerable. You can iterate - # over elements, and API calls will be issued to fetch pages as needed. - result.each do |item| - # Each element is of type ::Google::Cloud::Bigquery::DataTransfer::V1::DataSource. - p item - end -end -# [END bigquerydatatransfer_v1_generated_DataTransferService_ListDataSources_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/list_transfer_configs.rb b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/list_transfer_configs.rb deleted file mode 100644 index 6793c3706c00..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/list_transfer_configs.rb +++ /dev/null @@ -1,51 +0,0 @@ -# frozen_string_literal: true - -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Auto-generated by gapic-generator-ruby. DO NOT EDIT! - -# [START bigquerydatatransfer_v1_generated_DataTransferService_ListTransferConfigs_sync] -require "google/cloud/bigquery/data_transfer/v1" - -## -# Snippet for the list_transfer_configs call in the DataTransferService service -# -# This snippet has been automatically generated and should be regarded as a code -# template only. It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in https://cloud.google.com/ruby/docs/reference. -# -# This is an auto-generated example demonstrating basic usage of -# Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client#list_transfer_configs. -# -def list_transfer_configs - # Create a client object. The client can be reused for multiple calls. - client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new - - # Create a request. To set request fields, pass in keyword arguments. - request = Google::Cloud::Bigquery::DataTransfer::V1::ListTransferConfigsRequest.new - - # Call the list_transfer_configs method. - result = client.list_transfer_configs request - - # The returned object is of type Gapic::PagedEnumerable. You can iterate - # over elements, and API calls will be issued to fetch pages as needed. - result.each do |item| - # Each element is of type ::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig. 
- p item - end -end -# [END bigquerydatatransfer_v1_generated_DataTransferService_ListTransferConfigs_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/list_transfer_logs.rb b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/list_transfer_logs.rb deleted file mode 100644 index 456c8d63b7ad..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/list_transfer_logs.rb +++ /dev/null @@ -1,51 +0,0 @@ -# frozen_string_literal: true - -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Auto-generated by gapic-generator-ruby. DO NOT EDIT! - -# [START bigquerydatatransfer_v1_generated_DataTransferService_ListTransferLogs_sync] -require "google/cloud/bigquery/data_transfer/v1" - -## -# Snippet for the list_transfer_logs call in the DataTransferService service -# -# This snippet has been automatically generated and should be regarded as a code -# template only. It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in https://cloud.google.com/ruby/docs/reference. -# -# This is an auto-generated example demonstrating basic usage of -# Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client#list_transfer_logs. -# -def list_transfer_logs - # Create a client object. The client can be reused for multiple calls. - client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new - - # Create a request. To set request fields, pass in keyword arguments. - request = Google::Cloud::Bigquery::DataTransfer::V1::ListTransferLogsRequest.new - - # Call the list_transfer_logs method. - result = client.list_transfer_logs request - - # The returned object is of type Gapic::PagedEnumerable. You can iterate - # over elements, and API calls will be issued to fetch pages as needed. - result.each do |item| - # Each element is of type ::Google::Cloud::Bigquery::DataTransfer::V1::TransferMessage. - p item - end -end -# [END bigquerydatatransfer_v1_generated_DataTransferService_ListTransferLogs_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/list_transfer_runs.rb b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/list_transfer_runs.rb deleted file mode 100644 index c5f588f2844d..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/list_transfer_runs.rb +++ /dev/null @@ -1,51 +0,0 @@ -# frozen_string_literal: true - -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Auto-generated by gapic-generator-ruby. DO NOT EDIT! - -# [START bigquerydatatransfer_v1_generated_DataTransferService_ListTransferRuns_sync] -require "google/cloud/bigquery/data_transfer/v1" - -## -# Snippet for the list_transfer_runs call in the DataTransferService service -# -# This snippet has been automatically generated and should be regarded as a code -# template only. It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in https://cloud.google.com/ruby/docs/reference. -# -# This is an auto-generated example demonstrating basic usage of -# Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client#list_transfer_runs. -# -def list_transfer_runs - # Create a client object. The client can be reused for multiple calls. - client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new - - # Create a request. To set request fields, pass in keyword arguments. - request = Google::Cloud::Bigquery::DataTransfer::V1::ListTransferRunsRequest.new - - # Call the list_transfer_runs method. - result = client.list_transfer_runs request - - # The returned object is of type Gapic::PagedEnumerable. You can iterate - # over elements, and API calls will be issued to fetch pages as needed. - result.each do |item| - # Each element is of type ::Google::Cloud::Bigquery::DataTransfer::V1::TransferRun. - p item - end -end -# [END bigquerydatatransfer_v1_generated_DataTransferService_ListTransferRuns_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/schedule_transfer_runs.rb b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/schedule_transfer_runs.rb deleted file mode 100644 index a347a1fc13e9..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/schedule_transfer_runs.rb +++ /dev/null @@ -1,47 +0,0 @@ -# frozen_string_literal: true - -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Auto-generated by gapic-generator-ruby. DO NOT EDIT! - -# [START bigquerydatatransfer_v1_generated_DataTransferService_ScheduleTransferRuns_sync] -require "google/cloud/bigquery/data_transfer/v1" - -## -# Snippet for the schedule_transfer_runs call in the DataTransferService service -# -# This snippet has been automatically generated and should be regarded as a code -# template only. 
It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in https://cloud.google.com/ruby/docs/reference. -# -# This is an auto-generated example demonstrating basic usage of -# Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client#schedule_transfer_runs. -# -def schedule_transfer_runs - # Create a client object. The client can be reused for multiple calls. - client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new - - # Create a request. To set request fields, pass in keyword arguments. - request = Google::Cloud::Bigquery::DataTransfer::V1::ScheduleTransferRunsRequest.new - - # Call the schedule_transfer_runs method. - result = client.schedule_transfer_runs request - - # The returned object is of type Google::Cloud::Bigquery::DataTransfer::V1::ScheduleTransferRunsResponse. - p result -end -# [END bigquerydatatransfer_v1_generated_DataTransferService_ScheduleTransferRuns_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/start_manual_transfer_runs.rb b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/start_manual_transfer_runs.rb deleted file mode 100644 index 0eda612768c0..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/start_manual_transfer_runs.rb +++ /dev/null @@ -1,47 +0,0 @@ -# frozen_string_literal: true - -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Auto-generated by gapic-generator-ruby. DO NOT EDIT! - -# [START bigquerydatatransfer_v1_generated_DataTransferService_StartManualTransferRuns_sync] -require "google/cloud/bigquery/data_transfer/v1" - -## -# Snippet for the start_manual_transfer_runs call in the DataTransferService service -# -# This snippet has been automatically generated and should be regarded as a code -# template only. It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in https://cloud.google.com/ruby/docs/reference. -# -# This is an auto-generated example demonstrating basic usage of -# Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client#start_manual_transfer_runs. -# -def start_manual_transfer_runs - # Create a client object. The client can be reused for multiple calls. - client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new - - # Create a request. To set request fields, pass in keyword arguments. - request = Google::Cloud::Bigquery::DataTransfer::V1::StartManualTransferRunsRequest.new - - # Call the start_manual_transfer_runs method. 
- result = client.start_manual_transfer_runs request - - # The returned object is of type Google::Cloud::Bigquery::DataTransfer::V1::StartManualTransferRunsResponse. - p result -end -# [END bigquerydatatransfer_v1_generated_DataTransferService_StartManualTransferRuns_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/unenroll_data_sources.rb b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/unenroll_data_sources.rb deleted file mode 100644 index 357c7fb83f83..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/unenroll_data_sources.rb +++ /dev/null @@ -1,47 +0,0 @@ -# frozen_string_literal: true - -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Auto-generated by gapic-generator-ruby. DO NOT EDIT! - -# [START bigquerydatatransfer_v1_generated_DataTransferService_UnenrollDataSources_sync] -require "google/cloud/bigquery/data_transfer/v1" - -## -# Snippet for the unenroll_data_sources call in the DataTransferService service -# -# This snippet has been automatically generated and should be regarded as a code -# template only. It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in https://cloud.google.com/ruby/docs/reference. -# -# This is an auto-generated example demonstrating basic usage of -# Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client#unenroll_data_sources. -# -def unenroll_data_sources - # Create a client object. The client can be reused for multiple calls. - client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new - - # Create a request. To set request fields, pass in keyword arguments. - request = Google::Cloud::Bigquery::DataTransfer::V1::UnenrollDataSourcesRequest.new - - # Call the unenroll_data_sources method. - result = client.unenroll_data_sources request - - # The returned object is of type Google::Protobuf::Empty. - p result -end -# [END bigquerydatatransfer_v1_generated_DataTransferService_UnenrollDataSources_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/update_transfer_config.rb b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/update_transfer_config.rb deleted file mode 100644 index 74f806b153a9..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/data_transfer_service/update_transfer_config.rb +++ /dev/null @@ -1,47 +0,0 @@ -# frozen_string_literal: true - -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Auto-generated by gapic-generator-ruby. DO NOT EDIT! - -# [START bigquerydatatransfer_v1_generated_DataTransferService_UpdateTransferConfig_sync] -require "google/cloud/bigquery/data_transfer/v1" - -## -# Snippet for the update_transfer_config call in the DataTransferService service -# -# This snippet has been automatically generated and should be regarded as a code -# template only. It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in https://cloud.google.com/ruby/docs/reference. -# -# This is an auto-generated example demonstrating basic usage of -# Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client#update_transfer_config. -# -def update_transfer_config - # Create a client object. The client can be reused for multiple calls. - client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new - - # Create a request. To set request fields, pass in keyword arguments. - request = Google::Cloud::Bigquery::DataTransfer::V1::UpdateTransferConfigRequest.new - - # Call the update_transfer_config method. - result = client.update_transfer_config request - - # The returned object is of type Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig. - p result -end -# [END bigquerydatatransfer_v1_generated_DataTransferService_UpdateTransferConfig_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/snippet_metadata_google.cloud.bigquery.datatransfer.v1.json b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/snippet_metadata_google.cloud.bigquery.datatransfer.v1.json deleted file mode 100644 index c079afe66ffd..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/snippets/snippet_metadata_google.cloud.bigquery.datatransfer.v1.json +++ /dev/null @@ -1,655 +0,0 @@ -{ - "client_library": { - "name": "google-cloud-bigquery-data_transfer-v1", - "version": "", - "language": "RUBY", - "apis": [ - { - "id": "google.cloud.bigquery.datatransfer.v1", - "version": "v1" - } - ] - }, - "snippets": [ - { - "region_tag": "bigquerydatatransfer_v1_generated_DataTransferService_GetDataSource_sync", - "title": "Snippet for the get_data_source call in the DataTransferService service", - "description": "This is an auto-generated example demonstrating basic usage of Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client#get_data_source.", - "file": "data_transfer_service/get_data_source.rb", - "language": "RUBY", - "client_method": { - "short_name": "get_data_source", - "full_name": "::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client#get_data_source", - "async": false, - "parameters": [ - { - "type": "::Google::Cloud::Bigquery::DataTransfer::V1::GetDataSourceRequest", - "name": "request" - } - ], - "result_type": "::Google::Cloud::Bigquery::DataTransfer::V1::DataSource", - "client": { - "short_name": "DataTransferService::Client", - "full_name": 
"::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client" - }, - "method": { - "short_name": "GetDataSource", - "full_name": "google.cloud.bigquery.datatransfer.v1.DataTransferService.GetDataSource", - "service": { - "short_name": "DataTransferService", - "full_name": "google.cloud.bigquery.datatransfer.v1.DataTransferService" - } - } - }, - "canonical": true, - "origin": "API_DEFINITION", - "segments": [ - { - "start": 20, - "end": 46, - "type": "FULL" - } - ] - }, - { - "region_tag": "bigquerydatatransfer_v1_generated_DataTransferService_ListDataSources_sync", - "title": "Snippet for the list_data_sources call in the DataTransferService service", - "description": "This is an auto-generated example demonstrating basic usage of Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client#list_data_sources.", - "file": "data_transfer_service/list_data_sources.rb", - "language": "RUBY", - "client_method": { - "short_name": "list_data_sources", - "full_name": "::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client#list_data_sources", - "async": false, - "parameters": [ - { - "type": "::Google::Cloud::Bigquery::DataTransfer::V1::ListDataSourcesRequest", - "name": "request" - } - ], - "result_type": "::Google::Cloud::Bigquery::DataTransfer::V1::ListDataSourcesResponse", - "client": { - "short_name": "DataTransferService::Client", - "full_name": "::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client" - }, - "method": { - "short_name": "ListDataSources", - "full_name": "google.cloud.bigquery.datatransfer.v1.DataTransferService.ListDataSources", - "service": { - "short_name": "DataTransferService", - "full_name": "google.cloud.bigquery.datatransfer.v1.DataTransferService" - } - } - }, - "canonical": true, - "origin": "API_DEFINITION", - "segments": [ - { - "start": 20, - "end": 50, - "type": "FULL" - } - ] - }, - { - "region_tag": "bigquerydatatransfer_v1_generated_DataTransferService_CreateTransferConfig_sync", - "title": "Snippet for the create_transfer_config call in the DataTransferService service", - "description": "This is an auto-generated example demonstrating basic usage of Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client#create_transfer_config.", - "file": "data_transfer_service/create_transfer_config.rb", - "language": "RUBY", - "client_method": { - "short_name": "create_transfer_config", - "full_name": "::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client#create_transfer_config", - "async": false, - "parameters": [ - { - "type": "::Google::Cloud::Bigquery::DataTransfer::V1::CreateTransferConfigRequest", - "name": "request" - } - ], - "result_type": "::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig", - "client": { - "short_name": "DataTransferService::Client", - "full_name": "::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client" - }, - "method": { - "short_name": "CreateTransferConfig", - "full_name": "google.cloud.bigquery.datatransfer.v1.DataTransferService.CreateTransferConfig", - "service": { - "short_name": "DataTransferService", - "full_name": "google.cloud.bigquery.datatransfer.v1.DataTransferService" - } - } - }, - "canonical": true, - "origin": "API_DEFINITION", - "segments": [ - { - "start": 20, - "end": 46, - "type": "FULL" - } - ] - }, - { - "region_tag": "bigquerydatatransfer_v1_generated_DataTransferService_UpdateTransferConfig_sync", - "title": "Snippet for the update_transfer_config call in the DataTransferService service", - 
"description": "This is an auto-generated example demonstrating basic usage of Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client#update_transfer_config.", - "file": "data_transfer_service/update_transfer_config.rb", - "language": "RUBY", - "client_method": { - "short_name": "update_transfer_config", - "full_name": "::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client#update_transfer_config", - "async": false, - "parameters": [ - { - "type": "::Google::Cloud::Bigquery::DataTransfer::V1::UpdateTransferConfigRequest", - "name": "request" - } - ], - "result_type": "::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig", - "client": { - "short_name": "DataTransferService::Client", - "full_name": "::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client" - }, - "method": { - "short_name": "UpdateTransferConfig", - "full_name": "google.cloud.bigquery.datatransfer.v1.DataTransferService.UpdateTransferConfig", - "service": { - "short_name": "DataTransferService", - "full_name": "google.cloud.bigquery.datatransfer.v1.DataTransferService" - } - } - }, - "canonical": true, - "origin": "API_DEFINITION", - "segments": [ - { - "start": 20, - "end": 46, - "type": "FULL" - } - ] - }, - { - "region_tag": "bigquerydatatransfer_v1_generated_DataTransferService_DeleteTransferConfig_sync", - "title": "Snippet for the delete_transfer_config call in the DataTransferService service", - "description": "This is an auto-generated example demonstrating basic usage of Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client#delete_transfer_config.", - "file": "data_transfer_service/delete_transfer_config.rb", - "language": "RUBY", - "client_method": { - "short_name": "delete_transfer_config", - "full_name": "::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client#delete_transfer_config", - "async": false, - "parameters": [ - { - "type": "::Google::Cloud::Bigquery::DataTransfer::V1::DeleteTransferConfigRequest", - "name": "request" - } - ], - "result_type": "::Google::Protobuf::Empty", - "client": { - "short_name": "DataTransferService::Client", - "full_name": "::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client" - }, - "method": { - "short_name": "DeleteTransferConfig", - "full_name": "google.cloud.bigquery.datatransfer.v1.DataTransferService.DeleteTransferConfig", - "service": { - "short_name": "DataTransferService", - "full_name": "google.cloud.bigquery.datatransfer.v1.DataTransferService" - } - } - }, - "canonical": true, - "origin": "API_DEFINITION", - "segments": [ - { - "start": 20, - "end": 46, - "type": "FULL" - } - ] - }, - { - "region_tag": "bigquerydatatransfer_v1_generated_DataTransferService_GetTransferConfig_sync", - "title": "Snippet for the get_transfer_config call in the DataTransferService service", - "description": "This is an auto-generated example demonstrating basic usage of Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client#get_transfer_config.", - "file": "data_transfer_service/get_transfer_config.rb", - "language": "RUBY", - "client_method": { - "short_name": "get_transfer_config", - "full_name": "::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client#get_transfer_config", - "async": false, - "parameters": [ - { - "type": "::Google::Cloud::Bigquery::DataTransfer::V1::GetTransferConfigRequest", - "name": "request" - } - ], - "result_type": "::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig", - "client": { - "short_name": 
"DataTransferService::Client", - "full_name": "::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client" - }, - "method": { - "short_name": "GetTransferConfig", - "full_name": "google.cloud.bigquery.datatransfer.v1.DataTransferService.GetTransferConfig", - "service": { - "short_name": "DataTransferService", - "full_name": "google.cloud.bigquery.datatransfer.v1.DataTransferService" - } - } - }, - "canonical": true, - "origin": "API_DEFINITION", - "segments": [ - { - "start": 20, - "end": 46, - "type": "FULL" - } - ] - }, - { - "region_tag": "bigquerydatatransfer_v1_generated_DataTransferService_ListTransferConfigs_sync", - "title": "Snippet for the list_transfer_configs call in the DataTransferService service", - "description": "This is an auto-generated example demonstrating basic usage of Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client#list_transfer_configs.", - "file": "data_transfer_service/list_transfer_configs.rb", - "language": "RUBY", - "client_method": { - "short_name": "list_transfer_configs", - "full_name": "::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client#list_transfer_configs", - "async": false, - "parameters": [ - { - "type": "::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferConfigsRequest", - "name": "request" - } - ], - "result_type": "::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferConfigsResponse", - "client": { - "short_name": "DataTransferService::Client", - "full_name": "::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client" - }, - "method": { - "short_name": "ListTransferConfigs", - "full_name": "google.cloud.bigquery.datatransfer.v1.DataTransferService.ListTransferConfigs", - "service": { - "short_name": "DataTransferService", - "full_name": "google.cloud.bigquery.datatransfer.v1.DataTransferService" - } - } - }, - "canonical": true, - "origin": "API_DEFINITION", - "segments": [ - { - "start": 20, - "end": 50, - "type": "FULL" - } - ] - }, - { - "region_tag": "bigquerydatatransfer_v1_generated_DataTransferService_ScheduleTransferRuns_sync", - "title": "Snippet for the schedule_transfer_runs call in the DataTransferService service", - "description": "This is an auto-generated example demonstrating basic usage of Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client#schedule_transfer_runs.", - "file": "data_transfer_service/schedule_transfer_runs.rb", - "language": "RUBY", - "client_method": { - "short_name": "schedule_transfer_runs", - "full_name": "::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client#schedule_transfer_runs", - "async": false, - "parameters": [ - { - "type": "::Google::Cloud::Bigquery::DataTransfer::V1::ScheduleTransferRunsRequest", - "name": "request" - } - ], - "result_type": "::Google::Cloud::Bigquery::DataTransfer::V1::ScheduleTransferRunsResponse", - "client": { - "short_name": "DataTransferService::Client", - "full_name": "::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client" - }, - "method": { - "short_name": "ScheduleTransferRuns", - "full_name": "google.cloud.bigquery.datatransfer.v1.DataTransferService.ScheduleTransferRuns", - "service": { - "short_name": "DataTransferService", - "full_name": "google.cloud.bigquery.datatransfer.v1.DataTransferService" - } - } - }, - "canonical": true, - "origin": "API_DEFINITION", - "segments": [ - { - "start": 20, - "end": 46, - "type": "FULL" - } - ] - }, - { - "region_tag": 
"bigquerydatatransfer_v1_generated_DataTransferService_StartManualTransferRuns_sync", - "title": "Snippet for the start_manual_transfer_runs call in the DataTransferService service", - "description": "This is an auto-generated example demonstrating basic usage of Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client#start_manual_transfer_runs.", - "file": "data_transfer_service/start_manual_transfer_runs.rb", - "language": "RUBY", - "client_method": { - "short_name": "start_manual_transfer_runs", - "full_name": "::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client#start_manual_transfer_runs", - "async": false, - "parameters": [ - { - "type": "::Google::Cloud::Bigquery::DataTransfer::V1::StartManualTransferRunsRequest", - "name": "request" - } - ], - "result_type": "::Google::Cloud::Bigquery::DataTransfer::V1::StartManualTransferRunsResponse", - "client": { - "short_name": "DataTransferService::Client", - "full_name": "::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client" - }, - "method": { - "short_name": "StartManualTransferRuns", - "full_name": "google.cloud.bigquery.datatransfer.v1.DataTransferService.StartManualTransferRuns", - "service": { - "short_name": "DataTransferService", - "full_name": "google.cloud.bigquery.datatransfer.v1.DataTransferService" - } - } - }, - "canonical": true, - "origin": "API_DEFINITION", - "segments": [ - { - "start": 20, - "end": 46, - "type": "FULL" - } - ] - }, - { - "region_tag": "bigquerydatatransfer_v1_generated_DataTransferService_GetTransferRun_sync", - "title": "Snippet for the get_transfer_run call in the DataTransferService service", - "description": "This is an auto-generated example demonstrating basic usage of Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client#get_transfer_run.", - "file": "data_transfer_service/get_transfer_run.rb", - "language": "RUBY", - "client_method": { - "short_name": "get_transfer_run", - "full_name": "::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client#get_transfer_run", - "async": false, - "parameters": [ - { - "type": "::Google::Cloud::Bigquery::DataTransfer::V1::GetTransferRunRequest", - "name": "request" - } - ], - "result_type": "::Google::Cloud::Bigquery::DataTransfer::V1::TransferRun", - "client": { - "short_name": "DataTransferService::Client", - "full_name": "::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client" - }, - "method": { - "short_name": "GetTransferRun", - "full_name": "google.cloud.bigquery.datatransfer.v1.DataTransferService.GetTransferRun", - "service": { - "short_name": "DataTransferService", - "full_name": "google.cloud.bigquery.datatransfer.v1.DataTransferService" - } - } - }, - "canonical": true, - "origin": "API_DEFINITION", - "segments": [ - { - "start": 20, - "end": 46, - "type": "FULL" - } - ] - }, - { - "region_tag": "bigquerydatatransfer_v1_generated_DataTransferService_DeleteTransferRun_sync", - "title": "Snippet for the delete_transfer_run call in the DataTransferService service", - "description": "This is an auto-generated example demonstrating basic usage of Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client#delete_transfer_run.", - "file": "data_transfer_service/delete_transfer_run.rb", - "language": "RUBY", - "client_method": { - "short_name": "delete_transfer_run", - "full_name": "::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client#delete_transfer_run", - "async": false, - "parameters": [ - { - "type": 
"::Google::Cloud::Bigquery::DataTransfer::V1::DeleteTransferRunRequest", - "name": "request" - } - ], - "result_type": "::Google::Protobuf::Empty", - "client": { - "short_name": "DataTransferService::Client", - "full_name": "::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client" - }, - "method": { - "short_name": "DeleteTransferRun", - "full_name": "google.cloud.bigquery.datatransfer.v1.DataTransferService.DeleteTransferRun", - "service": { - "short_name": "DataTransferService", - "full_name": "google.cloud.bigquery.datatransfer.v1.DataTransferService" - } - } - }, - "canonical": true, - "origin": "API_DEFINITION", - "segments": [ - { - "start": 20, - "end": 46, - "type": "FULL" - } - ] - }, - { - "region_tag": "bigquerydatatransfer_v1_generated_DataTransferService_ListTransferRuns_sync", - "title": "Snippet for the list_transfer_runs call in the DataTransferService service", - "description": "This is an auto-generated example demonstrating basic usage of Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client#list_transfer_runs.", - "file": "data_transfer_service/list_transfer_runs.rb", - "language": "RUBY", - "client_method": { - "short_name": "list_transfer_runs", - "full_name": "::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client#list_transfer_runs", - "async": false, - "parameters": [ - { - "type": "::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferRunsRequest", - "name": "request" - } - ], - "result_type": "::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferRunsResponse", - "client": { - "short_name": "DataTransferService::Client", - "full_name": "::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client" - }, - "method": { - "short_name": "ListTransferRuns", - "full_name": "google.cloud.bigquery.datatransfer.v1.DataTransferService.ListTransferRuns", - "service": { - "short_name": "DataTransferService", - "full_name": "google.cloud.bigquery.datatransfer.v1.DataTransferService" - } - } - }, - "canonical": true, - "origin": "API_DEFINITION", - "segments": [ - { - "start": 20, - "end": 50, - "type": "FULL" - } - ] - }, - { - "region_tag": "bigquerydatatransfer_v1_generated_DataTransferService_ListTransferLogs_sync", - "title": "Snippet for the list_transfer_logs call in the DataTransferService service", - "description": "This is an auto-generated example demonstrating basic usage of Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client#list_transfer_logs.", - "file": "data_transfer_service/list_transfer_logs.rb", - "language": "RUBY", - "client_method": { - "short_name": "list_transfer_logs", - "full_name": "::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client#list_transfer_logs", - "async": false, - "parameters": [ - { - "type": "::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferLogsRequest", - "name": "request" - } - ], - "result_type": "::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferLogsResponse", - "client": { - "short_name": "DataTransferService::Client", - "full_name": "::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client" - }, - "method": { - "short_name": "ListTransferLogs", - "full_name": "google.cloud.bigquery.datatransfer.v1.DataTransferService.ListTransferLogs", - "service": { - "short_name": "DataTransferService", - "full_name": "google.cloud.bigquery.datatransfer.v1.DataTransferService" - } - } - }, - "canonical": true, - "origin": "API_DEFINITION", - "segments": [ - { - "start": 20, - "end": 50, - "type": "FULL" - } - 
] - }, - { - "region_tag": "bigquerydatatransfer_v1_generated_DataTransferService_CheckValidCreds_sync", - "title": "Snippet for the check_valid_creds call in the DataTransferService service", - "description": "This is an auto-generated example demonstrating basic usage of Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client#check_valid_creds.", - "file": "data_transfer_service/check_valid_creds.rb", - "language": "RUBY", - "client_method": { - "short_name": "check_valid_creds", - "full_name": "::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client#check_valid_creds", - "async": false, - "parameters": [ - { - "type": "::Google::Cloud::Bigquery::DataTransfer::V1::CheckValidCredsRequest", - "name": "request" - } - ], - "result_type": "::Google::Cloud::Bigquery::DataTransfer::V1::CheckValidCredsResponse", - "client": { - "short_name": "DataTransferService::Client", - "full_name": "::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client" - }, - "method": { - "short_name": "CheckValidCreds", - "full_name": "google.cloud.bigquery.datatransfer.v1.DataTransferService.CheckValidCreds", - "service": { - "short_name": "DataTransferService", - "full_name": "google.cloud.bigquery.datatransfer.v1.DataTransferService" - } - } - }, - "canonical": true, - "origin": "API_DEFINITION", - "segments": [ - { - "start": 20, - "end": 46, - "type": "FULL" - } - ] - }, - { - "region_tag": "bigquerydatatransfer_v1_generated_DataTransferService_EnrollDataSources_sync", - "title": "Snippet for the enroll_data_sources call in the DataTransferService service", - "description": "This is an auto-generated example demonstrating basic usage of Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client#enroll_data_sources.", - "file": "data_transfer_service/enroll_data_sources.rb", - "language": "RUBY", - "client_method": { - "short_name": "enroll_data_sources", - "full_name": "::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client#enroll_data_sources", - "async": false, - "parameters": [ - { - "type": "::Google::Cloud::Bigquery::DataTransfer::V1::EnrollDataSourcesRequest", - "name": "request" - } - ], - "result_type": "::Google::Protobuf::Empty", - "client": { - "short_name": "DataTransferService::Client", - "full_name": "::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client" - }, - "method": { - "short_name": "EnrollDataSources", - "full_name": "google.cloud.bigquery.datatransfer.v1.DataTransferService.EnrollDataSources", - "service": { - "short_name": "DataTransferService", - "full_name": "google.cloud.bigquery.datatransfer.v1.DataTransferService" - } - } - }, - "canonical": true, - "origin": "API_DEFINITION", - "segments": [ - { - "start": 20, - "end": 46, - "type": "FULL" - } - ] - }, - { - "region_tag": "bigquerydatatransfer_v1_generated_DataTransferService_UnenrollDataSources_sync", - "title": "Snippet for the unenroll_data_sources call in the DataTransferService service", - "description": "This is an auto-generated example demonstrating basic usage of Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client#unenroll_data_sources.", - "file": "data_transfer_service/unenroll_data_sources.rb", - "language": "RUBY", - "client_method": { - "short_name": "unenroll_data_sources", - "full_name": "::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client#unenroll_data_sources", - "async": false, - "parameters": [ - { - "type": 
"::Google::Cloud::Bigquery::DataTransfer::V1::UnenrollDataSourcesRequest", - "name": "request" - } - ], - "result_type": "::Google::Protobuf::Empty", - "client": { - "short_name": "DataTransferService::Client", - "full_name": "::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client" - }, - "method": { - "short_name": "UnenrollDataSources", - "full_name": "google.cloud.bigquery.datatransfer.v1.DataTransferService.UnenrollDataSources", - "service": { - "short_name": "DataTransferService", - "full_name": "google.cloud.bigquery.datatransfer.v1.DataTransferService" - } - } - }, - "canonical": true, - "origin": "API_DEFINITION", - "segments": [ - { - "start": 20, - "end": 46, - "type": "FULL" - } - ] - } - ] -} \ No newline at end of file diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/test/google/cloud/bigquery/data_transfer/v1/data_transfer_service_paths_test.rb b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/test/google/cloud/bigquery/data_transfer/v1/data_transfer_service_paths_test.rb deleted file mode 100644 index 054310efd5f4..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/test/google/cloud/bigquery/data_transfer/v1/data_transfer_service_paths_test.rb +++ /dev/null @@ -1,104 +0,0 @@ -# frozen_string_literal: true - -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Auto-generated by gapic-generator-ruby. DO NOT EDIT! 
- -require "helper" - -require "gapic/grpc/service_stub" - -require "google/cloud/bigquery/data_transfer/v1/data_transfer_service" - -class ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::ClientPathsTest < Minitest::Test - class DummyStub - def endpoint - "endpoint.example.com" - end - - def universe_domain - "example.com" - end - end - - def test_data_source_path - grpc_channel = ::GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure - ::Gapic::ServiceStub.stub :new, DummyStub.new do - client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new do |config| - config.credentials = grpc_channel - end - - path = client.data_source_path project: "value0", data_source: "value1" - assert_equal "projects/value0/dataSources/value1", path - - path = client.data_source_path project: "value0", location: "value1", data_source: "value2" - assert_equal "projects/value0/locations/value1/dataSources/value2", path - end - end - - def test_location_path - grpc_channel = ::GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure - ::Gapic::ServiceStub.stub :new, DummyStub.new do - client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new do |config| - config.credentials = grpc_channel - end - - path = client.location_path project: "value0", location: "value1" - assert_equal "projects/value0/locations/value1", path - end - end - - def test_project_path - grpc_channel = ::GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure - ::Gapic::ServiceStub.stub :new, DummyStub.new do - client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new do |config| - config.credentials = grpc_channel - end - - path = client.project_path project: "value0" - assert_equal "projects/value0", path - end - end - - def test_run_path - grpc_channel = ::GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure - ::Gapic::ServiceStub.stub :new, DummyStub.new do - client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new do |config| - config.credentials = grpc_channel - end - - path = client.run_path project: "value0", transfer_config: "value1", run: "value2" - assert_equal "projects/value0/transferConfigs/value1/runs/value2", path - - path = client.run_path project: "value0", location: "value1", transfer_config: "value2", run: "value3" - assert_equal "projects/value0/locations/value1/transferConfigs/value2/runs/value3", path - end - end - - def test_transfer_config_path - grpc_channel = ::GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure - ::Gapic::ServiceStub.stub :new, DummyStub.new do - client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new do |config| - config.credentials = grpc_channel - end - - path = client.transfer_config_path project: "value0", transfer_config: "value1" - assert_equal "projects/value0/transferConfigs/value1", path - - path = client.transfer_config_path project: "value0", location: "value1", transfer_config: "value2" - assert_equal "projects/value0/locations/value1/transferConfigs/value2", path - end - end -end diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/test/google/cloud/bigquery/data_transfer/v1/data_transfer_service_rest_test.rb b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/test/google/cloud/bigquery/data_transfer/v1/data_transfer_service_rest_test.rb deleted file mode 100644 index 2f9ff48637f1..000000000000 --- 
a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/test/google/cloud/bigquery/data_transfer/v1/data_transfer_service_rest_test.rb +++ /dev/null @@ -1,980 +0,0 @@ -# frozen_string_literal: true - -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Auto-generated by gapic-generator-ruby. DO NOT EDIT! - -require "helper" -require "gapic/rest" -require "google/cloud/bigquery/datatransfer/v1/datatransfer_pb" -require "google/cloud/bigquery/data_transfer/v1/data_transfer_service/rest" - - -class ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::ClientTest < Minitest::Test - class ClientStub - attr_accessor :call_count, :requests - - def initialize response, &block - @response = response - @block = block - @call_count = 0 - @requests = [] - end - - def make_get_request uri:, params: {}, options: {} - make_http_request :get, uri: uri, body: nil, params: params, options: options - end - - def make_delete_request uri:, params: {}, options: {} - make_http_request :delete, uri: uri, body: nil, params: params, options: options - end - - def make_post_request uri:, body: nil, params: {}, options: {} - make_http_request :post, uri: uri, body: body, params: params, options: options - end - - def make_patch_request uri:, body:, params: {}, options: {} - make_http_request :patch, uri: uri, body: body, params: params, options: options - end - - def make_put_request uri:, body:, params: {}, options: {} - make_http_request :put, uri: uri, body: body, params: params, options: options - end - - def make_http_request *args, **kwargs - @call_count += 1 - - @requests << @block&.call(*args, **kwargs) - - @response - end - - def endpoint - "endpoint.example.com" - end - - def universe_domain - "example.com" - end - end - - def test_get_data_source - # Create test objects. - client_result = ::Google::Cloud::Bigquery::DataTransfer::V1::DataSource.new - http_response = OpenStruct.new body: client_result.to_json - - call_options = {} - - # Create request parameters for a unary method. - name = "hello world" - - get_data_source_client_stub = ClientStub.new http_response do |_verb, uri:, body:, params:, options:| - assert options.metadata.key? :"x-goog-api-client" - assert options.metadata[:"x-goog-api-client"].include? "rest" - refute options.metadata[:"x-goog-api-client"].include? 
"grpc" - end - - ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::ServiceStub.stub :transcode_get_data_source_request, ["", "", {}] do - Gapic::Rest::ClientStub.stub :new, get_data_source_client_stub do - # Create client - client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::Client.new do |config| - config.credentials = :dummy_value - end - - # Use hash object - client.get_data_source({ name: name }) do |_result, response| - assert_equal http_response, response.underlying_op - end - - # Use named arguments - client.get_data_source name: name do |_result, response| - assert_equal http_response, response.underlying_op - end - - # Use protobuf object - client.get_data_source ::Google::Cloud::Bigquery::DataTransfer::V1::GetDataSourceRequest.new(name: name) do |_result, response| - assert_equal http_response, response.underlying_op - end - - # Use hash object with options - client.get_data_source({ name: name }, call_options) do |_result, response| - assert_equal http_response, response.underlying_op - end - - # Use protobuf object with options - client.get_data_source(::Google::Cloud::Bigquery::DataTransfer::V1::GetDataSourceRequest.new(name: name), call_options) do |_result, response| - assert_equal http_response, response.underlying_op - end - - # Verify method calls - assert_equal 5, get_data_source_client_stub.call_count - end - end - end - - def test_list_data_sources - # Create test objects. - client_result = ::Google::Cloud::Bigquery::DataTransfer::V1::ListDataSourcesResponse.new - http_response = OpenStruct.new body: client_result.to_json - - call_options = {} - - # Create request parameters for a unary method. - parent = "hello world" - page_token = "hello world" - page_size = 42 - - list_data_sources_client_stub = ClientStub.new http_response do |_verb, uri:, body:, params:, options:| - assert options.metadata.key? :"x-goog-api-client" - assert options.metadata[:"x-goog-api-client"].include? "rest" - refute options.metadata[:"x-goog-api-client"].include? 
"grpc" - end - - ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::ServiceStub.stub :transcode_list_data_sources_request, ["", "", {}] do - Gapic::Rest::ClientStub.stub :new, list_data_sources_client_stub do - # Create client - client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::Client.new do |config| - config.credentials = :dummy_value - end - - # Use hash object - client.list_data_sources({ parent: parent, page_token: page_token, page_size: page_size }) do |_result, response| - assert_equal http_response, response.underlying_op - end - - # Use named arguments - client.list_data_sources parent: parent, page_token: page_token, page_size: page_size do |_result, response| - assert_equal http_response, response.underlying_op - end - - # Use protobuf object - client.list_data_sources ::Google::Cloud::Bigquery::DataTransfer::V1::ListDataSourcesRequest.new(parent: parent, page_token: page_token, page_size: page_size) do |_result, response| - assert_equal http_response, response.underlying_op - end - - # Use hash object with options - client.list_data_sources({ parent: parent, page_token: page_token, page_size: page_size }, call_options) do |_result, response| - assert_equal http_response, response.underlying_op - end - - # Use protobuf object with options - client.list_data_sources(::Google::Cloud::Bigquery::DataTransfer::V1::ListDataSourcesRequest.new(parent: parent, page_token: page_token, page_size: page_size), call_options) do |_result, response| - assert_equal http_response, response.underlying_op - end - - # Verify method calls - assert_equal 5, list_data_sources_client_stub.call_count - end - end - end - - def test_create_transfer_config - # Create test objects. - client_result = ::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig.new - http_response = OpenStruct.new body: client_result.to_json - - call_options = {} - - # Create request parameters for a unary method. - parent = "hello world" - transfer_config = {} - authorization_code = "hello world" - version_info = "hello world" - service_account_name = "hello world" - - create_transfer_config_client_stub = ClientStub.new http_response do |_verb, uri:, body:, params:, options:| - assert options.metadata.key? :"x-goog-api-client" - assert options.metadata[:"x-goog-api-client"].include? "rest" - refute options.metadata[:"x-goog-api-client"].include? 
"grpc" - end - - ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::ServiceStub.stub :transcode_create_transfer_config_request, ["", "", {}] do - Gapic::Rest::ClientStub.stub :new, create_transfer_config_client_stub do - # Create client - client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::Client.new do |config| - config.credentials = :dummy_value - end - - # Use hash object - client.create_transfer_config({ parent: parent, transfer_config: transfer_config, authorization_code: authorization_code, version_info: version_info, service_account_name: service_account_name }) do |_result, response| - assert_equal http_response, response.underlying_op - end - - # Use named arguments - client.create_transfer_config parent: parent, transfer_config: transfer_config, authorization_code: authorization_code, version_info: version_info, service_account_name: service_account_name do |_result, response| - assert_equal http_response, response.underlying_op - end - - # Use protobuf object - client.create_transfer_config ::Google::Cloud::Bigquery::DataTransfer::V1::CreateTransferConfigRequest.new(parent: parent, transfer_config: transfer_config, authorization_code: authorization_code, version_info: version_info, service_account_name: service_account_name) do |_result, response| - assert_equal http_response, response.underlying_op - end - - # Use hash object with options - client.create_transfer_config({ parent: parent, transfer_config: transfer_config, authorization_code: authorization_code, version_info: version_info, service_account_name: service_account_name }, call_options) do |_result, response| - assert_equal http_response, response.underlying_op - end - - # Use protobuf object with options - client.create_transfer_config(::Google::Cloud::Bigquery::DataTransfer::V1::CreateTransferConfigRequest.new(parent: parent, transfer_config: transfer_config, authorization_code: authorization_code, version_info: version_info, service_account_name: service_account_name), call_options) do |_result, response| - assert_equal http_response, response.underlying_op - end - - # Verify method calls - assert_equal 5, create_transfer_config_client_stub.call_count - end - end - end - - def test_update_transfer_config - # Create test objects. - client_result = ::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig.new - http_response = OpenStruct.new body: client_result.to_json - - call_options = {} - - # Create request parameters for a unary method. - transfer_config = {} - authorization_code = "hello world" - update_mask = {} - version_info = "hello world" - service_account_name = "hello world" - - update_transfer_config_client_stub = ClientStub.new http_response do |_verb, uri:, body:, params:, options:| - assert options.metadata.key? :"x-goog-api-client" - assert options.metadata[:"x-goog-api-client"].include? "rest" - refute options.metadata[:"x-goog-api-client"].include? 
"grpc" - end - - ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::ServiceStub.stub :transcode_update_transfer_config_request, ["", "", {}] do - Gapic::Rest::ClientStub.stub :new, update_transfer_config_client_stub do - # Create client - client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::Client.new do |config| - config.credentials = :dummy_value - end - - # Use hash object - client.update_transfer_config({ transfer_config: transfer_config, authorization_code: authorization_code, update_mask: update_mask, version_info: version_info, service_account_name: service_account_name }) do |_result, response| - assert_equal http_response, response.underlying_op - end - - # Use named arguments - client.update_transfer_config transfer_config: transfer_config, authorization_code: authorization_code, update_mask: update_mask, version_info: version_info, service_account_name: service_account_name do |_result, response| - assert_equal http_response, response.underlying_op - end - - # Use protobuf object - client.update_transfer_config ::Google::Cloud::Bigquery::DataTransfer::V1::UpdateTransferConfigRequest.new(transfer_config: transfer_config, authorization_code: authorization_code, update_mask: update_mask, version_info: version_info, service_account_name: service_account_name) do |_result, response| - assert_equal http_response, response.underlying_op - end - - # Use hash object with options - client.update_transfer_config({ transfer_config: transfer_config, authorization_code: authorization_code, update_mask: update_mask, version_info: version_info, service_account_name: service_account_name }, call_options) do |_result, response| - assert_equal http_response, response.underlying_op - end - - # Use protobuf object with options - client.update_transfer_config(::Google::Cloud::Bigquery::DataTransfer::V1::UpdateTransferConfigRequest.new(transfer_config: transfer_config, authorization_code: authorization_code, update_mask: update_mask, version_info: version_info, service_account_name: service_account_name), call_options) do |_result, response| - assert_equal http_response, response.underlying_op - end - - # Verify method calls - assert_equal 5, update_transfer_config_client_stub.call_count - end - end - end - - def test_delete_transfer_config - # Create test objects. - client_result = ::Google::Protobuf::Empty.new - http_response = OpenStruct.new body: client_result.to_json - - call_options = {} - - # Create request parameters for a unary method. - name = "hello world" - - delete_transfer_config_client_stub = ClientStub.new http_response do |_verb, uri:, body:, params:, options:| - assert options.metadata.key? :"x-goog-api-client" - assert options.metadata[:"x-goog-api-client"].include? "rest" - refute options.metadata[:"x-goog-api-client"].include? 
"grpc" - end - - ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::ServiceStub.stub :transcode_delete_transfer_config_request, ["", "", {}] do - Gapic::Rest::ClientStub.stub :new, delete_transfer_config_client_stub do - # Create client - client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::Client.new do |config| - config.credentials = :dummy_value - end - - # Use hash object - client.delete_transfer_config({ name: name }) do |_result, response| - assert_equal http_response, response.underlying_op - end - - # Use named arguments - client.delete_transfer_config name: name do |_result, response| - assert_equal http_response, response.underlying_op - end - - # Use protobuf object - client.delete_transfer_config ::Google::Cloud::Bigquery::DataTransfer::V1::DeleteTransferConfigRequest.new(name: name) do |_result, response| - assert_equal http_response, response.underlying_op - end - - # Use hash object with options - client.delete_transfer_config({ name: name }, call_options) do |_result, response| - assert_equal http_response, response.underlying_op - end - - # Use protobuf object with options - client.delete_transfer_config(::Google::Cloud::Bigquery::DataTransfer::V1::DeleteTransferConfigRequest.new(name: name), call_options) do |_result, response| - assert_equal http_response, response.underlying_op - end - - # Verify method calls - assert_equal 5, delete_transfer_config_client_stub.call_count - end - end - end - - def test_get_transfer_config - # Create test objects. - client_result = ::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig.new - http_response = OpenStruct.new body: client_result.to_json - - call_options = {} - - # Create request parameters for a unary method. - name = "hello world" - - get_transfer_config_client_stub = ClientStub.new http_response do |_verb, uri:, body:, params:, options:| - assert options.metadata.key? :"x-goog-api-client" - assert options.metadata[:"x-goog-api-client"].include? "rest" - refute options.metadata[:"x-goog-api-client"].include? 
"grpc" - end - - ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::ServiceStub.stub :transcode_get_transfer_config_request, ["", "", {}] do - Gapic::Rest::ClientStub.stub :new, get_transfer_config_client_stub do - # Create client - client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::Client.new do |config| - config.credentials = :dummy_value - end - - # Use hash object - client.get_transfer_config({ name: name }) do |_result, response| - assert_equal http_response, response.underlying_op - end - - # Use named arguments - client.get_transfer_config name: name do |_result, response| - assert_equal http_response, response.underlying_op - end - - # Use protobuf object - client.get_transfer_config ::Google::Cloud::Bigquery::DataTransfer::V1::GetTransferConfigRequest.new(name: name) do |_result, response| - assert_equal http_response, response.underlying_op - end - - # Use hash object with options - client.get_transfer_config({ name: name }, call_options) do |_result, response| - assert_equal http_response, response.underlying_op - end - - # Use protobuf object with options - client.get_transfer_config(::Google::Cloud::Bigquery::DataTransfer::V1::GetTransferConfigRequest.new(name: name), call_options) do |_result, response| - assert_equal http_response, response.underlying_op - end - - # Verify method calls - assert_equal 5, get_transfer_config_client_stub.call_count - end - end - end - - def test_list_transfer_configs - # Create test objects. - client_result = ::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferConfigsResponse.new - http_response = OpenStruct.new body: client_result.to_json - - call_options = {} - - # Create request parameters for a unary method. - parent = "hello world" - data_source_ids = ["hello world"] - page_token = "hello world" - page_size = 42 - - list_transfer_configs_client_stub = ClientStub.new http_response do |_verb, uri:, body:, params:, options:| - assert options.metadata.key? :"x-goog-api-client" - assert options.metadata[:"x-goog-api-client"].include? "rest" - refute options.metadata[:"x-goog-api-client"].include? 
"grpc" - end - - ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::ServiceStub.stub :transcode_list_transfer_configs_request, ["", "", {}] do - Gapic::Rest::ClientStub.stub :new, list_transfer_configs_client_stub do - # Create client - client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::Client.new do |config| - config.credentials = :dummy_value - end - - # Use hash object - client.list_transfer_configs({ parent: parent, data_source_ids: data_source_ids, page_token: page_token, page_size: page_size }) do |_result, response| - assert_equal http_response, response.underlying_op - end - - # Use named arguments - client.list_transfer_configs parent: parent, data_source_ids: data_source_ids, page_token: page_token, page_size: page_size do |_result, response| - assert_equal http_response, response.underlying_op - end - - # Use protobuf object - client.list_transfer_configs ::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferConfigsRequest.new(parent: parent, data_source_ids: data_source_ids, page_token: page_token, page_size: page_size) do |_result, response| - assert_equal http_response, response.underlying_op - end - - # Use hash object with options - client.list_transfer_configs({ parent: parent, data_source_ids: data_source_ids, page_token: page_token, page_size: page_size }, call_options) do |_result, response| - assert_equal http_response, response.underlying_op - end - - # Use protobuf object with options - client.list_transfer_configs(::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferConfigsRequest.new(parent: parent, data_source_ids: data_source_ids, page_token: page_token, page_size: page_size), call_options) do |_result, response| - assert_equal http_response, response.underlying_op - end - - # Verify method calls - assert_equal 5, list_transfer_configs_client_stub.call_count - end - end - end - - def test_schedule_transfer_runs - # Create test objects. - client_result = ::Google::Cloud::Bigquery::DataTransfer::V1::ScheduleTransferRunsResponse.new - http_response = OpenStruct.new body: client_result.to_json - - call_options = {} - - # Create request parameters for a unary method. - parent = "hello world" - start_time = {} - end_time = {} - - schedule_transfer_runs_client_stub = ClientStub.new http_response do |_verb, uri:, body:, params:, options:| - assert options.metadata.key? :"x-goog-api-client" - assert options.metadata[:"x-goog-api-client"].include? "rest" - refute options.metadata[:"x-goog-api-client"].include? 
"grpc" - end - - ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::ServiceStub.stub :transcode_schedule_transfer_runs_request, ["", "", {}] do - Gapic::Rest::ClientStub.stub :new, schedule_transfer_runs_client_stub do - # Create client - client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::Client.new do |config| - config.credentials = :dummy_value - end - - # Use hash object - client.schedule_transfer_runs({ parent: parent, start_time: start_time, end_time: end_time }) do |_result, response| - assert_equal http_response, response.underlying_op - end - - # Use named arguments - client.schedule_transfer_runs parent: parent, start_time: start_time, end_time: end_time do |_result, response| - assert_equal http_response, response.underlying_op - end - - # Use protobuf object - client.schedule_transfer_runs ::Google::Cloud::Bigquery::DataTransfer::V1::ScheduleTransferRunsRequest.new(parent: parent, start_time: start_time, end_time: end_time) do |_result, response| - assert_equal http_response, response.underlying_op - end - - # Use hash object with options - client.schedule_transfer_runs({ parent: parent, start_time: start_time, end_time: end_time }, call_options) do |_result, response| - assert_equal http_response, response.underlying_op - end - - # Use protobuf object with options - client.schedule_transfer_runs(::Google::Cloud::Bigquery::DataTransfer::V1::ScheduleTransferRunsRequest.new(parent: parent, start_time: start_time, end_time: end_time), call_options) do |_result, response| - assert_equal http_response, response.underlying_op - end - - # Verify method calls - assert_equal 5, schedule_transfer_runs_client_stub.call_count - end - end - end - - def test_start_manual_transfer_runs - # Create test objects. - client_result = ::Google::Cloud::Bigquery::DataTransfer::V1::StartManualTransferRunsResponse.new - http_response = OpenStruct.new body: client_result.to_json - - call_options = {} - - # Create request parameters for a unary method. - parent = "hello world" - requested_time_range = {} - - start_manual_transfer_runs_client_stub = ClientStub.new http_response do |_verb, uri:, body:, params:, options:| - assert options.metadata.key? :"x-goog-api-client" - assert options.metadata[:"x-goog-api-client"].include? "rest" - refute options.metadata[:"x-goog-api-client"].include? 
"grpc" - end - - ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::ServiceStub.stub :transcode_start_manual_transfer_runs_request, ["", "", {}] do - Gapic::Rest::ClientStub.stub :new, start_manual_transfer_runs_client_stub do - # Create client - client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::Client.new do |config| - config.credentials = :dummy_value - end - - # Use hash object - client.start_manual_transfer_runs({ parent: parent, requested_time_range: requested_time_range }) do |_result, response| - assert_equal http_response, response.underlying_op - end - - # Use named arguments - client.start_manual_transfer_runs parent: parent, requested_time_range: requested_time_range do |_result, response| - assert_equal http_response, response.underlying_op - end - - # Use protobuf object - client.start_manual_transfer_runs ::Google::Cloud::Bigquery::DataTransfer::V1::StartManualTransferRunsRequest.new(parent: parent, requested_time_range: requested_time_range) do |_result, response| - assert_equal http_response, response.underlying_op - end - - # Use hash object with options - client.start_manual_transfer_runs({ parent: parent, requested_time_range: requested_time_range }, call_options) do |_result, response| - assert_equal http_response, response.underlying_op - end - - # Use protobuf object with options - client.start_manual_transfer_runs(::Google::Cloud::Bigquery::DataTransfer::V1::StartManualTransferRunsRequest.new(parent: parent, requested_time_range: requested_time_range), call_options) do |_result, response| - assert_equal http_response, response.underlying_op - end - - # Verify method calls - assert_equal 5, start_manual_transfer_runs_client_stub.call_count - end - end - end - - def test_get_transfer_run - # Create test objects. - client_result = ::Google::Cloud::Bigquery::DataTransfer::V1::TransferRun.new - http_response = OpenStruct.new body: client_result.to_json - - call_options = {} - - # Create request parameters for a unary method. - name = "hello world" - - get_transfer_run_client_stub = ClientStub.new http_response do |_verb, uri:, body:, params:, options:| - assert options.metadata.key? :"x-goog-api-client" - assert options.metadata[:"x-goog-api-client"].include? "rest" - refute options.metadata[:"x-goog-api-client"].include? 
"grpc" - end - - ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::ServiceStub.stub :transcode_get_transfer_run_request, ["", "", {}] do - Gapic::Rest::ClientStub.stub :new, get_transfer_run_client_stub do - # Create client - client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::Client.new do |config| - config.credentials = :dummy_value - end - - # Use hash object - client.get_transfer_run({ name: name }) do |_result, response| - assert_equal http_response, response.underlying_op - end - - # Use named arguments - client.get_transfer_run name: name do |_result, response| - assert_equal http_response, response.underlying_op - end - - # Use protobuf object - client.get_transfer_run ::Google::Cloud::Bigquery::DataTransfer::V1::GetTransferRunRequest.new(name: name) do |_result, response| - assert_equal http_response, response.underlying_op - end - - # Use hash object with options - client.get_transfer_run({ name: name }, call_options) do |_result, response| - assert_equal http_response, response.underlying_op - end - - # Use protobuf object with options - client.get_transfer_run(::Google::Cloud::Bigquery::DataTransfer::V1::GetTransferRunRequest.new(name: name), call_options) do |_result, response| - assert_equal http_response, response.underlying_op - end - - # Verify method calls - assert_equal 5, get_transfer_run_client_stub.call_count - end - end - end - - def test_delete_transfer_run - # Create test objects. - client_result = ::Google::Protobuf::Empty.new - http_response = OpenStruct.new body: client_result.to_json - - call_options = {} - - # Create request parameters for a unary method. - name = "hello world" - - delete_transfer_run_client_stub = ClientStub.new http_response do |_verb, uri:, body:, params:, options:| - assert options.metadata.key? :"x-goog-api-client" - assert options.metadata[:"x-goog-api-client"].include? "rest" - refute options.metadata[:"x-goog-api-client"].include? "grpc" - end - - ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::ServiceStub.stub :transcode_delete_transfer_run_request, ["", "", {}] do - Gapic::Rest::ClientStub.stub :new, delete_transfer_run_client_stub do - # Create client - client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::Client.new do |config| - config.credentials = :dummy_value - end - - # Use hash object - client.delete_transfer_run({ name: name }) do |_result, response| - assert_equal http_response, response.underlying_op - end - - # Use named arguments - client.delete_transfer_run name: name do |_result, response| - assert_equal http_response, response.underlying_op - end - - # Use protobuf object - client.delete_transfer_run ::Google::Cloud::Bigquery::DataTransfer::V1::DeleteTransferRunRequest.new(name: name) do |_result, response| - assert_equal http_response, response.underlying_op - end - - # Use hash object with options - client.delete_transfer_run({ name: name }, call_options) do |_result, response| - assert_equal http_response, response.underlying_op - end - - # Use protobuf object with options - client.delete_transfer_run(::Google::Cloud::Bigquery::DataTransfer::V1::DeleteTransferRunRequest.new(name: name), call_options) do |_result, response| - assert_equal http_response, response.underlying_op - end - - # Verify method calls - assert_equal 5, delete_transfer_run_client_stub.call_count - end - end - end - - def test_list_transfer_runs - # Create test objects. 
-    client_result = ::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferRunsResponse.new
-    http_response = OpenStruct.new body: client_result.to_json
-
-    call_options = {}
-
-    # Create request parameters for a unary method.
-    parent = "hello world"
-    states = [:TRANSFER_STATE_UNSPECIFIED]
-    page_token = "hello world"
-    page_size = 42
-    run_attempt = :RUN_ATTEMPT_UNSPECIFIED
-
-    list_transfer_runs_client_stub = ClientStub.new http_response do |_verb, uri:, body:, params:, options:|
-      assert options.metadata.key? :"x-goog-api-client"
-      assert options.metadata[:"x-goog-api-client"].include? "rest"
-      refute options.metadata[:"x-goog-api-client"].include? "grpc"
-    end
-
-    ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::ServiceStub.stub :transcode_list_transfer_runs_request, ["", "", {}] do
-      Gapic::Rest::ClientStub.stub :new, list_transfer_runs_client_stub do
-        # Create client
-        client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::Client.new do |config|
-          config.credentials = :dummy_value
-        end
-
-        # Use hash object
-        client.list_transfer_runs({ parent: parent, states: states, page_token: page_token, page_size: page_size, run_attempt: run_attempt }) do |_result, response|
-          assert_equal http_response, response.underlying_op
-        end
-
-        # Use named arguments
-        client.list_transfer_runs parent: parent, states: states, page_token: page_token, page_size: page_size, run_attempt: run_attempt do |_result, response|
-          assert_equal http_response, response.underlying_op
-        end
-
-        # Use protobuf object
-        client.list_transfer_runs ::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferRunsRequest.new(parent: parent, states: states, page_token: page_token, page_size: page_size, run_attempt: run_attempt) do |_result, response|
-          assert_equal http_response, response.underlying_op
-        end
-
-        # Use hash object with options
-        client.list_transfer_runs({ parent: parent, states: states, page_token: page_token, page_size: page_size, run_attempt: run_attempt }, call_options) do |_result, response|
-          assert_equal http_response, response.underlying_op
-        end
-
-        # Use protobuf object with options
-        client.list_transfer_runs(::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferRunsRequest.new(parent: parent, states: states, page_token: page_token, page_size: page_size, run_attempt: run_attempt), call_options) do |_result, response|
-          assert_equal http_response, response.underlying_op
-        end
-
-        # Verify method calls
-        assert_equal 5, list_transfer_runs_client_stub.call_count
-      end
-    end
-  end
-
-  def test_list_transfer_logs
-    # Create test objects.
-    client_result = ::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferLogsResponse.new
-    http_response = OpenStruct.new body: client_result.to_json
-
-    call_options = {}
-
-    # Create request parameters for a unary method.
-    parent = "hello world"
-    page_token = "hello world"
-    page_size = 42
-    message_types = [:MESSAGE_SEVERITY_UNSPECIFIED]
-
-    list_transfer_logs_client_stub = ClientStub.new http_response do |_verb, uri:, body:, params:, options:|
-      assert options.metadata.key? :"x-goog-api-client"
-      assert options.metadata[:"x-goog-api-client"].include? "rest"
-      refute options.metadata[:"x-goog-api-client"].include? "grpc"
-    end
-
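message_types filters transfer log lines by severity; the test uses the enum's zero value. Bare Ruby symbols map onto the proto enum names, so a realistic filter might look like this sketch (severity names per the v1 TransferMessage enum):

    message_types = [:WARNING, :ERROR] # drop INFO-level log lines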
"grpc" - end - - ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::ServiceStub.stub :transcode_list_transfer_logs_request, ["", "", {}] do - Gapic::Rest::ClientStub.stub :new, list_transfer_logs_client_stub do - # Create client - client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::Client.new do |config| - config.credentials = :dummy_value - end - - # Use hash object - client.list_transfer_logs({ parent: parent, page_token: page_token, page_size: page_size, message_types: message_types }) do |_result, response| - assert_equal http_response, response.underlying_op - end - - # Use named arguments - client.list_transfer_logs parent: parent, page_token: page_token, page_size: page_size, message_types: message_types do |_result, response| - assert_equal http_response, response.underlying_op - end - - # Use protobuf object - client.list_transfer_logs ::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferLogsRequest.new(parent: parent, page_token: page_token, page_size: page_size, message_types: message_types) do |_result, response| - assert_equal http_response, response.underlying_op - end - - # Use hash object with options - client.list_transfer_logs({ parent: parent, page_token: page_token, page_size: page_size, message_types: message_types }, call_options) do |_result, response| - assert_equal http_response, response.underlying_op - end - - # Use protobuf object with options - client.list_transfer_logs(::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferLogsRequest.new(parent: parent, page_token: page_token, page_size: page_size, message_types: message_types), call_options) do |_result, response| - assert_equal http_response, response.underlying_op - end - - # Verify method calls - assert_equal 5, list_transfer_logs_client_stub.call_count - end - end - end - - def test_check_valid_creds - # Create test objects. - client_result = ::Google::Cloud::Bigquery::DataTransfer::V1::CheckValidCredsResponse.new - http_response = OpenStruct.new body: client_result.to_json - - call_options = {} - - # Create request parameters for a unary method. - name = "hello world" - - check_valid_creds_client_stub = ClientStub.new http_response do |_verb, uri:, body:, params:, options:| - assert options.metadata.key? :"x-goog-api-client" - assert options.metadata[:"x-goog-api-client"].include? "rest" - refute options.metadata[:"x-goog-api-client"].include? 
"grpc" - end - - ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::ServiceStub.stub :transcode_check_valid_creds_request, ["", "", {}] do - Gapic::Rest::ClientStub.stub :new, check_valid_creds_client_stub do - # Create client - client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::Client.new do |config| - config.credentials = :dummy_value - end - - # Use hash object - client.check_valid_creds({ name: name }) do |_result, response| - assert_equal http_response, response.underlying_op - end - - # Use named arguments - client.check_valid_creds name: name do |_result, response| - assert_equal http_response, response.underlying_op - end - - # Use protobuf object - client.check_valid_creds ::Google::Cloud::Bigquery::DataTransfer::V1::CheckValidCredsRequest.new(name: name) do |_result, response| - assert_equal http_response, response.underlying_op - end - - # Use hash object with options - client.check_valid_creds({ name: name }, call_options) do |_result, response| - assert_equal http_response, response.underlying_op - end - - # Use protobuf object with options - client.check_valid_creds(::Google::Cloud::Bigquery::DataTransfer::V1::CheckValidCredsRequest.new(name: name), call_options) do |_result, response| - assert_equal http_response, response.underlying_op - end - - # Verify method calls - assert_equal 5, check_valid_creds_client_stub.call_count - end - end - end - - def test_enroll_data_sources - # Create test objects. - client_result = ::Google::Protobuf::Empty.new - http_response = OpenStruct.new body: client_result.to_json - - call_options = {} - - # Create request parameters for a unary method. - name = "hello world" - data_source_ids = ["hello world"] - - enroll_data_sources_client_stub = ClientStub.new http_response do |_verb, uri:, body:, params:, options:| - assert options.metadata.key? :"x-goog-api-client" - assert options.metadata[:"x-goog-api-client"].include? "rest" - refute options.metadata[:"x-goog-api-client"].include? 
"grpc" - end - - ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::ServiceStub.stub :transcode_enroll_data_sources_request, ["", "", {}] do - Gapic::Rest::ClientStub.stub :new, enroll_data_sources_client_stub do - # Create client - client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::Client.new do |config| - config.credentials = :dummy_value - end - - # Use hash object - client.enroll_data_sources({ name: name, data_source_ids: data_source_ids }) do |_result, response| - assert_equal http_response, response.underlying_op - end - - # Use named arguments - client.enroll_data_sources name: name, data_source_ids: data_source_ids do |_result, response| - assert_equal http_response, response.underlying_op - end - - # Use protobuf object - client.enroll_data_sources ::Google::Cloud::Bigquery::DataTransfer::V1::EnrollDataSourcesRequest.new(name: name, data_source_ids: data_source_ids) do |_result, response| - assert_equal http_response, response.underlying_op - end - - # Use hash object with options - client.enroll_data_sources({ name: name, data_source_ids: data_source_ids }, call_options) do |_result, response| - assert_equal http_response, response.underlying_op - end - - # Use protobuf object with options - client.enroll_data_sources(::Google::Cloud::Bigquery::DataTransfer::V1::EnrollDataSourcesRequest.new(name: name, data_source_ids: data_source_ids), call_options) do |_result, response| - assert_equal http_response, response.underlying_op - end - - # Verify method calls - assert_equal 5, enroll_data_sources_client_stub.call_count - end - end - end - - def test_unenroll_data_sources - # Create test objects. - client_result = ::Google::Protobuf::Empty.new - http_response = OpenStruct.new body: client_result.to_json - - call_options = {} - - # Create request parameters for a unary method. - name = "hello world" - data_source_ids = ["hello world"] - - unenroll_data_sources_client_stub = ClientStub.new http_response do |_verb, uri:, body:, params:, options:| - assert options.metadata.key? :"x-goog-api-client" - assert options.metadata[:"x-goog-api-client"].include? "rest" - refute options.metadata[:"x-goog-api-client"].include? 
"grpc" - end - - ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::ServiceStub.stub :transcode_unenroll_data_sources_request, ["", "", {}] do - Gapic::Rest::ClientStub.stub :new, unenroll_data_sources_client_stub do - # Create client - client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::Client.new do |config| - config.credentials = :dummy_value - end - - # Use hash object - client.unenroll_data_sources({ name: name, data_source_ids: data_source_ids }) do |_result, response| - assert_equal http_response, response.underlying_op - end - - # Use named arguments - client.unenroll_data_sources name: name, data_source_ids: data_source_ids do |_result, response| - assert_equal http_response, response.underlying_op - end - - # Use protobuf object - client.unenroll_data_sources ::Google::Cloud::Bigquery::DataTransfer::V1::UnenrollDataSourcesRequest.new(name: name, data_source_ids: data_source_ids) do |_result, response| - assert_equal http_response, response.underlying_op - end - - # Use hash object with options - client.unenroll_data_sources({ name: name, data_source_ids: data_source_ids }, call_options) do |_result, response| - assert_equal http_response, response.underlying_op - end - - # Use protobuf object with options - client.unenroll_data_sources(::Google::Cloud::Bigquery::DataTransfer::V1::UnenrollDataSourcesRequest.new(name: name, data_source_ids: data_source_ids), call_options) do |_result, response| - assert_equal http_response, response.underlying_op - end - - # Verify method calls - assert_equal 5, unenroll_data_sources_client_stub.call_count - end - end - end - - def test_configure - credentials_token = :dummy_value - - client = block_config = config = nil - dummy_stub = ClientStub.new nil - Gapic::Rest::ClientStub.stub :new, dummy_stub do - client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::Client.new do |config| - config.credentials = credentials_token - end - end - - config = client.configure do |c| - block_config = c - end - - assert_same block_config, config - assert_kind_of ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Rest::Client::Configuration, config - end -end diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/test/google/cloud/bigquery/data_transfer/v1/data_transfer_service_test.rb b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/test/google/cloud/bigquery/data_transfer/v1/data_transfer_service_test.rb deleted file mode 100644 index bfd435f6547c..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/test/google/cloud/bigquery/data_transfer/v1/data_transfer_service_test.rb +++ /dev/null @@ -1,1075 +0,0 @@ -# frozen_string_literal: true - -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Auto-generated by gapic-generator-ruby. DO NOT EDIT! 
-
-require "helper"
-
-require "gapic/grpc/service_stub"
-
-require "google/cloud/bigquery/datatransfer/v1/datatransfer_pb"
-require "google/cloud/bigquery/datatransfer/v1/datatransfer_services_pb"
-require "google/cloud/bigquery/data_transfer/v1/data_transfer_service"
-
-class ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::ClientTest < Minitest::Test
-  class ClientStub
-    attr_accessor :call_rpc_count, :requests
-
-    def initialize response, operation, &block
-      @response = response
-      @operation = operation
-      @block = block
-      @call_rpc_count = 0
-      @requests = []
-    end
-
-    def call_rpc *args, **kwargs
-      @call_rpc_count += 1
-
-      @requests << @block&.call(*args, **kwargs)
-
-      yield @response, @operation if block_given?
-
-      @response
-    end
-
-    def endpoint
-      "endpoint.example.com"
-    end
-
-    def universe_domain
-      "example.com"
-    end
-  end
-
-  def test_get_data_source
-    # Create GRPC objects.
-    grpc_response = ::Google::Cloud::Bigquery::DataTransfer::V1::DataSource.new
-    grpc_operation = GRPC::ActiveCall::Operation.new nil
-    grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
-    grpc_options = {}
-
-    # Create request parameters for a unary method.
-    name = "hello world"
-
-    get_data_source_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
-      assert_equal :get_data_source, name
-      assert_kind_of ::Google::Cloud::Bigquery::DataTransfer::V1::GetDataSourceRequest, request
-      assert_equal "hello world", request["name"]
-      refute_nil options
-    end
-
-    Gapic::ServiceStub.stub :new, get_data_source_client_stub do
-      # Create client
-      client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new do |config|
-        config.credentials = grpc_channel
-      end
-
-      # Use hash object
-      client.get_data_source({ name: name }) do |response, operation|
-        assert_equal grpc_response, response
-        assert_equal grpc_operation, operation
-      end
-
-      # Use named arguments
-      client.get_data_source name: name do |response, operation|
-        assert_equal grpc_response, response
-        assert_equal grpc_operation, operation
-      end
-
-      # Use protobuf object
-      client.get_data_source ::Google::Cloud::Bigquery::DataTransfer::V1::GetDataSourceRequest.new(name: name) do |response, operation|
-        assert_equal grpc_response, response
-        assert_equal grpc_operation, operation
-      end
-
-      # Use hash object with options
-      client.get_data_source({ name: name }, grpc_options) do |response, operation|
-        assert_equal grpc_response, response
-        assert_equal grpc_operation, operation
-      end
-
-      # Use protobuf object with options
-      client.get_data_source(::Google::Cloud::Bigquery::DataTransfer::V1::GetDataSourceRequest.new(name: name), grpc_options) do |response, operation|
-        assert_equal grpc_response, response
-        assert_equal grpc_operation, operation
-      end
-
-      # Verify method calls
-      assert_equal 5, get_data_source_client_stub.call_rpc_count
-    end
-  end
-
-  def test_list_data_sources
-    # Create GRPC objects.
-    grpc_response = ::Google::Cloud::Bigquery::DataTransfer::V1::ListDataSourcesResponse.new
-    grpc_operation = GRPC::ActiveCall::Operation.new nil
-    grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
-    grpc_options = {}
-
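The paged assertions in test_list_data_sources below expect the client to wrap the raw response in a Gapic::PagedEnumerable. Outside the test, consuming one looks like this sketch (the parent value is a placeholder):

    results = client.list_data_sources parent: "projects/my-project"
    results.each { |data_source| puts data_source.data_source_id } # pages transparently
    results.each_page { |page| p page.count }                      # or walk explicit pages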
-    # Create request parameters for a unary method.
-    parent = "hello world"
-    page_token = "hello world"
-    page_size = 42
-
-    list_data_sources_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
-      assert_equal :list_data_sources, name
-      assert_kind_of ::Google::Cloud::Bigquery::DataTransfer::V1::ListDataSourcesRequest, request
-      assert_equal "hello world", request["parent"]
-      assert_equal "hello world", request["page_token"]
-      assert_equal 42, request["page_size"]
-      refute_nil options
-    end
-
-    Gapic::ServiceStub.stub :new, list_data_sources_client_stub do
-      # Create client
-      client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new do |config|
-        config.credentials = grpc_channel
-      end
-
-      # Use hash object
-      client.list_data_sources({ parent: parent, page_token: page_token, page_size: page_size }) do |response, operation|
-        assert_kind_of Gapic::PagedEnumerable, response
-        assert_equal grpc_response, response.response
-        assert_equal grpc_operation, operation
-      end
-
-      # Use named arguments
-      client.list_data_sources parent: parent, page_token: page_token, page_size: page_size do |response, operation|
-        assert_kind_of Gapic::PagedEnumerable, response
-        assert_equal grpc_response, response.response
-        assert_equal grpc_operation, operation
-      end
-
-      # Use protobuf object
-      client.list_data_sources ::Google::Cloud::Bigquery::DataTransfer::V1::ListDataSourcesRequest.new(parent: parent, page_token: page_token, page_size: page_size) do |response, operation|
-        assert_kind_of Gapic::PagedEnumerable, response
-        assert_equal grpc_response, response.response
-        assert_equal grpc_operation, operation
-      end
-
-      # Use hash object with options
-      client.list_data_sources({ parent: parent, page_token: page_token, page_size: page_size }, grpc_options) do |response, operation|
-        assert_kind_of Gapic::PagedEnumerable, response
-        assert_equal grpc_response, response.response
-        assert_equal grpc_operation, operation
-      end
-
-      # Use protobuf object with options
-      client.list_data_sources(::Google::Cloud::Bigquery::DataTransfer::V1::ListDataSourcesRequest.new(parent: parent, page_token: page_token, page_size: page_size), grpc_options) do |response, operation|
-        assert_kind_of Gapic::PagedEnumerable, response
-        assert_equal grpc_response, response.response
-        assert_equal grpc_operation, operation
-      end
-
-      # Verify method calls
-      assert_equal 5, list_data_sources_client_stub.call_rpc_count
-    end
-  end
-
-  def test_create_transfer_config
-    # Create GRPC objects.
-    grpc_response = ::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig.new
-    grpc_operation = GRPC::ActiveCall::Operation.new nil
-    grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
-    grpc_options = {}
-
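In test_create_transfer_config the transfer_config argument is an empty hash that the stub's assertion coerces to a TransferConfig message. A fuller illustrative hash (all values are placeholders; params becomes a Protobuf Struct):

    transfer_config = {
      destination_dataset_id: "my_dataset",
      display_name: "nightly load",
      data_source_id: "scheduled_query",
      schedule: "every 24 hours",
      params: { "query" => "SELECT 1" }
    }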
-    # Create request parameters for a unary method.
-    parent = "hello world"
-    transfer_config = {}
-    authorization_code = "hello world"
-    version_info = "hello world"
-    service_account_name = "hello world"
-
-    create_transfer_config_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
-      assert_equal :create_transfer_config, name
-      assert_kind_of ::Google::Cloud::Bigquery::DataTransfer::V1::CreateTransferConfigRequest, request
-      assert_equal "hello world", request["parent"]
-      assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig), request["transfer_config"]
-      assert_equal "hello world", request["authorization_code"]
-      assert_equal "hello world", request["version_info"]
-      assert_equal "hello world", request["service_account_name"]
-      refute_nil options
-    end
-
-    Gapic::ServiceStub.stub :new, create_transfer_config_client_stub do
-      # Create client
-      client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new do |config|
-        config.credentials = grpc_channel
-      end
-
-      # Use hash object
-      client.create_transfer_config({ parent: parent, transfer_config: transfer_config, authorization_code: authorization_code, version_info: version_info, service_account_name: service_account_name }) do |response, operation|
-        assert_equal grpc_response, response
-        assert_equal grpc_operation, operation
-      end
-
-      # Use named arguments
-      client.create_transfer_config parent: parent, transfer_config: transfer_config, authorization_code: authorization_code, version_info: version_info, service_account_name: service_account_name do |response, operation|
-        assert_equal grpc_response, response
-        assert_equal grpc_operation, operation
-      end
-
-      # Use protobuf object
-      client.create_transfer_config ::Google::Cloud::Bigquery::DataTransfer::V1::CreateTransferConfigRequest.new(parent: parent, transfer_config: transfer_config, authorization_code: authorization_code, version_info: version_info, service_account_name: service_account_name) do |response, operation|
-        assert_equal grpc_response, response
-        assert_equal grpc_operation, operation
-      end
-
-      # Use hash object with options
-      client.create_transfer_config({ parent: parent, transfer_config: transfer_config, authorization_code: authorization_code, version_info: version_info, service_account_name: service_account_name }, grpc_options) do |response, operation|
-        assert_equal grpc_response, response
-        assert_equal grpc_operation, operation
-      end
-
-      # Use protobuf object with options
-      client.create_transfer_config(::Google::Cloud::Bigquery::DataTransfer::V1::CreateTransferConfigRequest.new(parent: parent, transfer_config: transfer_config, authorization_code: authorization_code, version_info: version_info, service_account_name: service_account_name), grpc_options) do |response, operation|
-        assert_equal grpc_response, response
-        assert_equal grpc_operation, operation
-      end
-
-      # Verify method calls
-      assert_equal 5, create_transfer_config_client_stub.call_rpc_count
-    end
-  end
-
-  def test_update_transfer_config
-    # Create GRPC objects.
-    grpc_response = ::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig.new
-    grpc_operation = GRPC::ActiveCall::Operation.new nil
-    grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
-    grpc_options = {}
-
-    # Create request parameters for a unary method.
-    transfer_config = {}
-    authorization_code = "hello world"
-    update_mask = {}
-    version_info = "hello world"
-    service_account_name = "hello world"
-
-    update_transfer_config_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
-      assert_equal :update_transfer_config, name
-      assert_kind_of ::Google::Cloud::Bigquery::DataTransfer::V1::UpdateTransferConfigRequest, request
-      assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig), request["transfer_config"]
-      assert_equal "hello world", request["authorization_code"]
-      assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Protobuf::FieldMask), request["update_mask"]
-      assert_equal "hello world", request["version_info"]
-      assert_equal "hello world", request["service_account_name"]
-      refute_nil options
-    end
-
-    Gapic::ServiceStub.stub :new, update_transfer_config_client_stub do
-      # Create client
-      client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new do |config|
-        config.credentials = grpc_channel
-      end
-
-      # Use hash object
-      client.update_transfer_config({ transfer_config: transfer_config, authorization_code: authorization_code, update_mask: update_mask, version_info: version_info, service_account_name: service_account_name }) do |response, operation|
-        assert_equal grpc_response, response
-        assert_equal grpc_operation, operation
-      end
-
-      # Use named arguments
-      client.update_transfer_config transfer_config: transfer_config, authorization_code: authorization_code, update_mask: update_mask, version_info: version_info, service_account_name: service_account_name do |response, operation|
-        assert_equal grpc_response, response
-        assert_equal grpc_operation, operation
-      end
-
-      # Use protobuf object
-      client.update_transfer_config ::Google::Cloud::Bigquery::DataTransfer::V1::UpdateTransferConfigRequest.new(transfer_config: transfer_config, authorization_code: authorization_code, update_mask: update_mask, version_info: version_info, service_account_name: service_account_name) do |response, operation|
-        assert_equal grpc_response, response
-        assert_equal grpc_operation, operation
-      end
-
-      # Use hash object with options
-      client.update_transfer_config({ transfer_config: transfer_config, authorization_code: authorization_code, update_mask: update_mask, version_info: version_info, service_account_name: service_account_name }, grpc_options) do |response, operation|
-        assert_equal grpc_response, response
-        assert_equal grpc_operation, operation
-      end
-
-      # Use protobuf object with options
-      client.update_transfer_config(::Google::Cloud::Bigquery::DataTransfer::V1::UpdateTransferConfigRequest.new(transfer_config: transfer_config, authorization_code: authorization_code, update_mask: update_mask, version_info: version_info, service_account_name: service_account_name), grpc_options) do |response, operation|
-        assert_equal grpc_response, response
-        assert_equal grpc_operation, operation
-      end
-
-      # Verify method calls
-      assert_equal 5, update_transfer_config_client_stub.call_rpc_count
-    end
-  end
-
-  def test_delete_transfer_config
-    # Create GRPC objects.
-    grpc_response = ::Google::Protobuf::Empty.new
-    grpc_operation = GRPC::ActiveCall::Operation.new nil
-    grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
-    grpc_options = {}
-
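test_update_transfer_config above likewise passes update_mask as an empty hash, coerced to ::Google::Protobuf::FieldMask. In real use the mask names the fields to change; a sketch (the field names are illustrative):

    update_mask = { paths: ["display_name", "schedule"] } # only these fields are updated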
-    # Create request parameters for a unary method.
-    name = "hello world"
-
-    delete_transfer_config_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
-      assert_equal :delete_transfer_config, name
-      assert_kind_of ::Google::Cloud::Bigquery::DataTransfer::V1::DeleteTransferConfigRequest, request
-      assert_equal "hello world", request["name"]
-      refute_nil options
-    end
-
-    Gapic::ServiceStub.stub :new, delete_transfer_config_client_stub do
-      # Create client
-      client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new do |config|
-        config.credentials = grpc_channel
-      end
-
-      # Use hash object
-      client.delete_transfer_config({ name: name }) do |response, operation|
-        assert_equal grpc_response, response
-        assert_equal grpc_operation, operation
-      end
-
-      # Use named arguments
-      client.delete_transfer_config name: name do |response, operation|
-        assert_equal grpc_response, response
-        assert_equal grpc_operation, operation
-      end
-
-      # Use protobuf object
-      client.delete_transfer_config ::Google::Cloud::Bigquery::DataTransfer::V1::DeleteTransferConfigRequest.new(name: name) do |response, operation|
-        assert_equal grpc_response, response
-        assert_equal grpc_operation, operation
-      end
-
-      # Use hash object with options
-      client.delete_transfer_config({ name: name }, grpc_options) do |response, operation|
-        assert_equal grpc_response, response
-        assert_equal grpc_operation, operation
-      end
-
-      # Use protobuf object with options
-      client.delete_transfer_config(::Google::Cloud::Bigquery::DataTransfer::V1::DeleteTransferConfigRequest.new(name: name), grpc_options) do |response, operation|
-        assert_equal grpc_response, response
-        assert_equal grpc_operation, operation
-      end
-
-      # Verify method calls
-      assert_equal 5, delete_transfer_config_client_stub.call_rpc_count
-    end
-  end
-
-  def test_get_transfer_config
-    # Create GRPC objects.
-    grpc_response = ::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig.new
-    grpc_operation = GRPC::ActiveCall::Operation.new nil
-    grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
-    grpc_options = {}
-
-    # Create request parameters for a unary method.
-    name = "hello world"
-
-    get_transfer_config_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
-      assert_equal :get_transfer_config, name
-      assert_kind_of ::Google::Cloud::Bigquery::DataTransfer::V1::GetTransferConfigRequest, request
-      assert_equal "hello world", request["name"]
-      refute_nil options
-    end
-
-    Gapic::ServiceStub.stub :new, get_transfer_config_client_stub do
-      # Create client
-      client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new do |config|
-        config.credentials = grpc_channel
-      end
-
-      # Use hash object
-      client.get_transfer_config({ name: name }) do |response, operation|
-        assert_equal grpc_response, response
-        assert_equal grpc_operation, operation
-      end
-
-      # Use named arguments
-      client.get_transfer_config name: name do |response, operation|
-        assert_equal grpc_response, response
-        assert_equal grpc_operation, operation
-      end
-
-      # Use protobuf object
-      client.get_transfer_config ::Google::Cloud::Bigquery::DataTransfer::V1::GetTransferConfigRequest.new(name: name) do |response, operation|
-        assert_equal grpc_response, response
-        assert_equal grpc_operation, operation
-      end
-
-      # Use hash object with options
-      client.get_transfer_config({ name: name }, grpc_options) do |response, operation|
-        assert_equal grpc_response, response
-        assert_equal grpc_operation, operation
-      end
-
-      # Use protobuf object with options
-      client.get_transfer_config(::Google::Cloud::Bigquery::DataTransfer::V1::GetTransferConfigRequest.new(name: name), grpc_options) do |response, operation|
-        assert_equal grpc_response, response
-        assert_equal grpc_operation, operation
-      end
-
-      # Verify method calls
-      assert_equal 5, get_transfer_config_client_stub.call_rpc_count
-    end
-  end
-
-  def test_list_transfer_configs
-    # Create GRPC objects.
-    grpc_response = ::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferConfigsResponse.new
-    grpc_operation = GRPC::ActiveCall::Operation.new nil
-    grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
-    grpc_options = {}
-
-    # Create request parameters for a unary method.
-    parent = "hello world"
-    data_source_ids = ["hello world"]
-    page_token = "hello world"
-    page_size = 42
-
-    list_transfer_configs_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
-      assert_equal :list_transfer_configs, name
-      assert_kind_of ::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferConfigsRequest, request
-      assert_equal "hello world", request["parent"]
-      assert_equal ["hello world"], request["data_source_ids"]
-      assert_equal "hello world", request["page_token"]
-      assert_equal 42, request["page_size"]
-      refute_nil options
-    end
-
-    Gapic::ServiceStub.stub :new, list_transfer_configs_client_stub do
-      # Create client
-      client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new do |config|
-        config.credentials = grpc_channel
-      end
-
-      # Use hash object
-      client.list_transfer_configs({ parent: parent, data_source_ids: data_source_ids, page_token: page_token, page_size: page_size }) do |response, operation|
-        assert_kind_of Gapic::PagedEnumerable, response
-        assert_equal grpc_response, response.response
-        assert_equal grpc_operation, operation
-      end
-
-      # Use named arguments
-      client.list_transfer_configs parent: parent, data_source_ids: data_source_ids, page_token: page_token, page_size: page_size do |response, operation|
-        assert_kind_of Gapic::PagedEnumerable, response
-        assert_equal grpc_response, response.response
-        assert_equal grpc_operation, operation
-      end
-
-      # Use protobuf object
-      client.list_transfer_configs ::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferConfigsRequest.new(parent: parent, data_source_ids: data_source_ids, page_token: page_token, page_size: page_size) do |response, operation|
-        assert_kind_of Gapic::PagedEnumerable, response
-        assert_equal grpc_response, response.response
-        assert_equal grpc_operation, operation
-      end
-
-      # Use hash object with options
-      client.list_transfer_configs({ parent: parent, data_source_ids: data_source_ids, page_token: page_token, page_size: page_size }, grpc_options) do |response, operation|
-        assert_kind_of Gapic::PagedEnumerable, response
-        assert_equal grpc_response, response.response
-        assert_equal grpc_operation, operation
-      end
-
-      # Use protobuf object with options
-      client.list_transfer_configs(::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferConfigsRequest.new(parent: parent, data_source_ids: data_source_ids, page_token: page_token, page_size: page_size), grpc_options) do |response, operation|
-        assert_kind_of Gapic::PagedEnumerable, response
-        assert_equal grpc_response, response.response
-        assert_equal grpc_operation, operation
-      end
-
-      # Verify method calls
-      assert_equal 5, list_transfer_configs_client_stub.call_rpc_count
-    end
-  end
-
-  def test_schedule_transfer_runs
-    # Create GRPC objects.
-    grpc_response = ::Google::Cloud::Bigquery::DataTransfer::V1::ScheduleTransferRunsResponse.new
-    grpc_operation = GRPC::ActiveCall::Operation.new nil
-    grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
-    grpc_options = {}
-
-    # Create request parameters for a unary method.
-    parent = "hello world"
-    start_time = {}
-    end_time = {}
-
-    schedule_transfer_runs_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
-      assert_equal :schedule_transfer_runs, name
-      assert_kind_of ::Google::Cloud::Bigquery::DataTransfer::V1::ScheduleTransferRunsRequest, request
-      assert_equal "hello world", request["parent"]
-      assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Protobuf::Timestamp), request["start_time"]
-      assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Protobuf::Timestamp), request["end_time"]
-      refute_nil options
-    end
-
-    Gapic::ServiceStub.stub :new, schedule_transfer_runs_client_stub do
-      # Create client
-      client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new do |config|
-        config.credentials = grpc_channel
-      end
-
-      # Use hash object
-      client.schedule_transfer_runs({ parent: parent, start_time: start_time, end_time: end_time }) do |response, operation|
-        assert_equal grpc_response, response
-        assert_equal grpc_operation, operation
-      end
-
-      # Use named arguments
-      client.schedule_transfer_runs parent: parent, start_time: start_time, end_time: end_time do |response, operation|
-        assert_equal grpc_response, response
-        assert_equal grpc_operation, operation
-      end
-
-      # Use protobuf object
-      client.schedule_transfer_runs ::Google::Cloud::Bigquery::DataTransfer::V1::ScheduleTransferRunsRequest.new(parent: parent, start_time: start_time, end_time: end_time) do |response, operation|
-        assert_equal grpc_response, response
-        assert_equal grpc_operation, operation
-      end
-
-      # Use hash object with options
-      client.schedule_transfer_runs({ parent: parent, start_time: start_time, end_time: end_time }, grpc_options) do |response, operation|
-        assert_equal grpc_response, response
-        assert_equal grpc_operation, operation
-      end
-
-      # Use protobuf object with options
-      client.schedule_transfer_runs(::Google::Cloud::Bigquery::DataTransfer::V1::ScheduleTransferRunsRequest.new(parent: parent, start_time: start_time, end_time: end_time), grpc_options) do |response, operation|
-        assert_equal grpc_response, response
-        assert_equal grpc_operation, operation
-      end
-
-      # Verify method calls
-      assert_equal 5, schedule_transfer_runs_client_stub.call_rpc_count
-    end
-  end
-
-  def test_start_manual_transfer_runs
-    # Create GRPC objects.
-    grpc_response = ::Google::Cloud::Bigquery::DataTransfer::V1::StartManualTransferRunsResponse.new
-    grpc_operation = GRPC::ActiveCall::Operation.new nil
-    grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
-    grpc_options = {}
-
-    # Create request parameters for a unary method.
-    parent = "hello world"
-    requested_time_range = {}
-
-    start_manual_transfer_runs_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
-      assert_equal :start_manual_transfer_runs, name
-      assert_kind_of ::Google::Cloud::Bigquery::DataTransfer::V1::StartManualTransferRunsRequest, request
-      assert_equal "hello world", request["parent"]
-      assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Cloud::Bigquery::DataTransfer::V1::StartManualTransferRunsRequest::TimeRange), request["requested_time_range"]
-      assert_equal :requested_time_range, request.time
-      refute_nil options
-    end
-
-    Gapic::ServiceStub.stub :new, start_manual_transfer_runs_client_stub do
-      # Create client
-      client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new do |config|
-        config.credentials = grpc_channel
-      end
-
-      # Use hash object
-      client.start_manual_transfer_runs({ parent: parent, requested_time_range: requested_time_range }) do |response, operation|
-        assert_equal grpc_response, response
-        assert_equal grpc_operation, operation
-      end
-
-      # Use named arguments
-      client.start_manual_transfer_runs parent: parent, requested_time_range: requested_time_range do |response, operation|
-        assert_equal grpc_response, response
-        assert_equal grpc_operation, operation
-      end
-
-      # Use protobuf object
-      client.start_manual_transfer_runs ::Google::Cloud::Bigquery::DataTransfer::V1::StartManualTransferRunsRequest.new(parent: parent, requested_time_range: requested_time_range) do |response, operation|
-        assert_equal grpc_response, response
-        assert_equal grpc_operation, operation
-      end
-
-      # Use hash object with options
-      client.start_manual_transfer_runs({ parent: parent, requested_time_range: requested_time_range }, grpc_options) do |response, operation|
-        assert_equal grpc_response, response
-        assert_equal grpc_operation, operation
-      end
-
-      # Use protobuf object with options
-      client.start_manual_transfer_runs(::Google::Cloud::Bigquery::DataTransfer::V1::StartManualTransferRunsRequest.new(parent: parent, requested_time_range: requested_time_range), grpc_options) do |response, operation|
-        assert_equal grpc_response, response
-        assert_equal grpc_operation, operation
-      end
-
-      # Verify method calls
-      assert_equal 5, start_manual_transfer_runs_client_stub.call_rpc_count
-    end
-  end
-
-  def test_get_transfer_run
-    # Create GRPC objects.
-    grpc_response = ::Google::Cloud::Bigquery::DataTransfer::V1::TransferRun.new
-    grpc_operation = GRPC::ActiveCall::Operation.new nil
-    grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
-    grpc_options = {}
-
-    # Create request parameters for a unary method.
-    name = "hello world"
-
-    get_transfer_run_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
-      assert_equal :get_transfer_run, name
-      assert_kind_of ::Google::Cloud::Bigquery::DataTransfer::V1::GetTransferRunRequest, request
-      assert_equal "hello world", request["name"]
-      refute_nil options
-    end
-
-    Gapic::ServiceStub.stub :new, get_transfer_run_client_stub do
-      # Create client
-      client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new do |config|
-        config.credentials = grpc_channel
-      end
-
-      # Use hash object
-      client.get_transfer_run({ name: name }) do |response, operation|
-        assert_equal grpc_response, response
-        assert_equal grpc_operation, operation
-      end
-
-      # Use named arguments
-      client.get_transfer_run name: name do |response, operation|
-        assert_equal grpc_response, response
-        assert_equal grpc_operation, operation
-      end
-
-      # Use protobuf object
-      client.get_transfer_run ::Google::Cloud::Bigquery::DataTransfer::V1::GetTransferRunRequest.new(name: name) do |response, operation|
-        assert_equal grpc_response, response
-        assert_equal grpc_operation, operation
-      end
-
-      # Use hash object with options
-      client.get_transfer_run({ name: name }, grpc_options) do |response, operation|
-        assert_equal grpc_response, response
-        assert_equal grpc_operation, operation
-      end
-
-      # Use protobuf object with options
-      client.get_transfer_run(::Google::Cloud::Bigquery::DataTransfer::V1::GetTransferRunRequest.new(name: name), grpc_options) do |response, operation|
-        assert_equal grpc_response, response
-        assert_equal grpc_operation, operation
-      end
-
-      # Verify method calls
-      assert_equal 5, get_transfer_run_client_stub.call_rpc_count
-    end
-  end
-
-  def test_delete_transfer_run
-    # Create GRPC objects.
-    grpc_response = ::Google::Protobuf::Empty.new
-    grpc_operation = GRPC::ActiveCall::Operation.new nil
-    grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
-    grpc_options = {}
-
-    # Create request parameters for a unary method.
-    name = "hello world"
-
-    delete_transfer_run_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
-      assert_equal :delete_transfer_run, name
-      assert_kind_of ::Google::Cloud::Bigquery::DataTransfer::V1::DeleteTransferRunRequest, request
-      assert_equal "hello world", request["name"]
-      refute_nil options
-    end
-
-    Gapic::ServiceStub.stub :new, delete_transfer_run_client_stub do
-      # Create client
-      client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new do |config|
-        config.credentials = grpc_channel
-      end
-
-      # Use hash object
-      client.delete_transfer_run({ name: name }) do |response, operation|
-        assert_equal grpc_response, response
-        assert_equal grpc_operation, operation
-      end
-
-      # Use named arguments
-      client.delete_transfer_run name: name do |response, operation|
-        assert_equal grpc_response, response
-        assert_equal grpc_operation, operation
-      end
-
-      # Use protobuf object
-      client.delete_transfer_run ::Google::Cloud::Bigquery::DataTransfer::V1::DeleteTransferRunRequest.new(name: name) do |response, operation|
-        assert_equal grpc_response, response
-        assert_equal grpc_operation, operation
-      end
-
-      # Use hash object with options
-      client.delete_transfer_run({ name: name }, grpc_options) do |response, operation|
-        assert_equal grpc_response, response
-        assert_equal grpc_operation, operation
-      end
-
-      # Use protobuf object with options
-      client.delete_transfer_run(::Google::Cloud::Bigquery::DataTransfer::V1::DeleteTransferRunRequest.new(name: name), grpc_options) do |response, operation|
-        assert_equal grpc_response, response
-        assert_equal grpc_operation, operation
-      end
-
-      # Verify method calls
-      assert_equal 5, delete_transfer_run_client_stub.call_rpc_count
-    end
-  end
-
-  def test_list_transfer_runs
-    # Create GRPC objects.
-    grpc_response = ::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferRunsResponse.new
-    grpc_operation = GRPC::ActiveCall::Operation.new nil
-    grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
-    grpc_options = {}
-
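The list_transfer_runs parameters below show how proto enums surface in Ruby: bare symbols matching the enum value names, with arrays for repeated enum fields. A sketch with non-zero values (names per the v1 TransferState and RunAttempt enums):

    states      = [:PENDING, :RUNNING] # repeated TransferState filter
    run_attempt = :LATEST              # only the latest attempt per run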
-    # Create request parameters for a unary method.
-    parent = "hello world"
-    states = [:TRANSFER_STATE_UNSPECIFIED]
-    page_token = "hello world"
-    page_size = 42
-    run_attempt = :RUN_ATTEMPT_UNSPECIFIED
-
-    list_transfer_runs_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
-      assert_equal :list_transfer_runs, name
-      assert_kind_of ::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferRunsRequest, request
-      assert_equal "hello world", request["parent"]
-      assert_equal [:TRANSFER_STATE_UNSPECIFIED], request["states"]
-      assert_equal "hello world", request["page_token"]
-      assert_equal 42, request["page_size"]
-      assert_equal :RUN_ATTEMPT_UNSPECIFIED, request["run_attempt"]
-      refute_nil options
-    end
-
-    Gapic::ServiceStub.stub :new, list_transfer_runs_client_stub do
-      # Create client
-      client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new do |config|
-        config.credentials = grpc_channel
-      end
-
-      # Use hash object
-      client.list_transfer_runs({ parent: parent, states: states, page_token: page_token, page_size: page_size, run_attempt: run_attempt }) do |response, operation|
-        assert_kind_of Gapic::PagedEnumerable, response
-        assert_equal grpc_response, response.response
-        assert_equal grpc_operation, operation
-      end
-
-      # Use named arguments
-      client.list_transfer_runs parent: parent, states: states, page_token: page_token, page_size: page_size, run_attempt: run_attempt do |response, operation|
-        assert_kind_of Gapic::PagedEnumerable, response
-        assert_equal grpc_response, response.response
-        assert_equal grpc_operation, operation
-      end
-
-      # Use protobuf object
-      client.list_transfer_runs ::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferRunsRequest.new(parent: parent, states: states, page_token: page_token, page_size: page_size, run_attempt: run_attempt) do |response, operation|
-        assert_kind_of Gapic::PagedEnumerable, response
-        assert_equal grpc_response, response.response
-        assert_equal grpc_operation, operation
-      end
-
-      # Use hash object with options
-      client.list_transfer_runs({ parent: parent, states: states, page_token: page_token, page_size: page_size, run_attempt: run_attempt }, grpc_options) do |response, operation|
-        assert_kind_of Gapic::PagedEnumerable, response
-        assert_equal grpc_response, response.response
-        assert_equal grpc_operation, operation
-      end
-
-      # Use protobuf object with options
-      client.list_transfer_runs(::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferRunsRequest.new(parent: parent, states: states, page_token: page_token, page_size: page_size, run_attempt: run_attempt), grpc_options) do |response, operation|
-        assert_kind_of Gapic::PagedEnumerable, response
-        assert_equal grpc_response, response.response
-        assert_equal grpc_operation, operation
-      end
-
-      # Verify method calls
-      assert_equal 5, list_transfer_runs_client_stub.call_rpc_count
-    end
-  end
-
-  def test_list_transfer_logs
-    # Create GRPC objects.
-    grpc_response = ::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferLogsResponse.new
-    grpc_operation = GRPC::ActiveCall::Operation.new nil
-    grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
-    grpc_options = {}
-
-    # Create request parameters for a unary method.
- parent = "hello world" - page_token = "hello world" - page_size = 42 - message_types = [:MESSAGE_SEVERITY_UNSPECIFIED] - - list_transfer_logs_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:| - assert_equal :list_transfer_logs, name - assert_kind_of ::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferLogsRequest, request - assert_equal "hello world", request["parent"] - assert_equal "hello world", request["page_token"] - assert_equal 42, request["page_size"] - assert_equal [:MESSAGE_SEVERITY_UNSPECIFIED], request["message_types"] - refute_nil options - end - - Gapic::ServiceStub.stub :new, list_transfer_logs_client_stub do - # Create client - client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new do |config| - config.credentials = grpc_channel - end - - # Use hash object - client.list_transfer_logs({ parent: parent, page_token: page_token, page_size: page_size, message_types: message_types }) do |response, operation| - assert_kind_of Gapic::PagedEnumerable, response - assert_equal grpc_response, response.response - assert_equal grpc_operation, operation - end - - # Use named arguments - client.list_transfer_logs parent: parent, page_token: page_token, page_size: page_size, message_types: message_types do |response, operation| - assert_kind_of Gapic::PagedEnumerable, response - assert_equal grpc_response, response.response - assert_equal grpc_operation, operation - end - - # Use protobuf object - client.list_transfer_logs ::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferLogsRequest.new(parent: parent, page_token: page_token, page_size: page_size, message_types: message_types) do |response, operation| - assert_kind_of Gapic::PagedEnumerable, response - assert_equal grpc_response, response.response - assert_equal grpc_operation, operation - end - - # Use hash object with options - client.list_transfer_logs({ parent: parent, page_token: page_token, page_size: page_size, message_types: message_types }, grpc_options) do |response, operation| - assert_kind_of Gapic::PagedEnumerable, response - assert_equal grpc_response, response.response - assert_equal grpc_operation, operation - end - - # Use protobuf object with options - client.list_transfer_logs(::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferLogsRequest.new(parent: parent, page_token: page_token, page_size: page_size, message_types: message_types), grpc_options) do |response, operation| - assert_kind_of Gapic::PagedEnumerable, response - assert_equal grpc_response, response.response - assert_equal grpc_operation, operation - end - - # Verify method calls - assert_equal 5, list_transfer_logs_client_stub.call_rpc_count - end - end - - def test_check_valid_creds - # Create GRPC objects. - grpc_response = ::Google::Cloud::Bigquery::DataTransfer::V1::CheckValidCredsResponse.new - grpc_operation = GRPC::ActiveCall::Operation.new nil - grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure - grpc_options = {} - - # Create request parameters for a unary method. 
- name = "hello world" - - check_valid_creds_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:| - assert_equal :check_valid_creds, name - assert_kind_of ::Google::Cloud::Bigquery::DataTransfer::V1::CheckValidCredsRequest, request - assert_equal "hello world", request["name"] - refute_nil options - end - - Gapic::ServiceStub.stub :new, check_valid_creds_client_stub do - # Create client - client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new do |config| - config.credentials = grpc_channel - end - - # Use hash object - client.check_valid_creds({ name: name }) do |response, operation| - assert_equal grpc_response, response - assert_equal grpc_operation, operation - end - - # Use named arguments - client.check_valid_creds name: name do |response, operation| - assert_equal grpc_response, response - assert_equal grpc_operation, operation - end - - # Use protobuf object - client.check_valid_creds ::Google::Cloud::Bigquery::DataTransfer::V1::CheckValidCredsRequest.new(name: name) do |response, operation| - assert_equal grpc_response, response - assert_equal grpc_operation, operation - end - - # Use hash object with options - client.check_valid_creds({ name: name }, grpc_options) do |response, operation| - assert_equal grpc_response, response - assert_equal grpc_operation, operation - end - - # Use protobuf object with options - client.check_valid_creds(::Google::Cloud::Bigquery::DataTransfer::V1::CheckValidCredsRequest.new(name: name), grpc_options) do |response, operation| - assert_equal grpc_response, response - assert_equal grpc_operation, operation - end - - # Verify method calls - assert_equal 5, check_valid_creds_client_stub.call_rpc_count - end - end - - def test_enroll_data_sources - # Create GRPC objects. - grpc_response = ::Google::Protobuf::Empty.new - grpc_operation = GRPC::ActiveCall::Operation.new nil - grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure - grpc_options = {} - - # Create request parameters for a unary method. 
- name = "hello world" - data_source_ids = ["hello world"] - - enroll_data_sources_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:| - assert_equal :enroll_data_sources, name - assert_kind_of ::Google::Cloud::Bigquery::DataTransfer::V1::EnrollDataSourcesRequest, request - assert_equal "hello world", request["name"] - assert_equal ["hello world"], request["data_source_ids"] - refute_nil options - end - - Gapic::ServiceStub.stub :new, enroll_data_sources_client_stub do - # Create client - client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new do |config| - config.credentials = grpc_channel - end - - # Use hash object - client.enroll_data_sources({ name: name, data_source_ids: data_source_ids }) do |response, operation| - assert_equal grpc_response, response - assert_equal grpc_operation, operation - end - - # Use named arguments - client.enroll_data_sources name: name, data_source_ids: data_source_ids do |response, operation| - assert_equal grpc_response, response - assert_equal grpc_operation, operation - end - - # Use protobuf object - client.enroll_data_sources ::Google::Cloud::Bigquery::DataTransfer::V1::EnrollDataSourcesRequest.new(name: name, data_source_ids: data_source_ids) do |response, operation| - assert_equal grpc_response, response - assert_equal grpc_operation, operation - end - - # Use hash object with options - client.enroll_data_sources({ name: name, data_source_ids: data_source_ids }, grpc_options) do |response, operation| - assert_equal grpc_response, response - assert_equal grpc_operation, operation - end - - # Use protobuf object with options - client.enroll_data_sources(::Google::Cloud::Bigquery::DataTransfer::V1::EnrollDataSourcesRequest.new(name: name, data_source_ids: data_source_ids), grpc_options) do |response, operation| - assert_equal grpc_response, response - assert_equal grpc_operation, operation - end - - # Verify method calls - assert_equal 5, enroll_data_sources_client_stub.call_rpc_count - end - end - - def test_unenroll_data_sources - # Create GRPC objects. - grpc_response = ::Google::Protobuf::Empty.new - grpc_operation = GRPC::ActiveCall::Operation.new nil - grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure - grpc_options = {} - - # Create request parameters for a unary method. 
- name = "hello world" - data_source_ids = ["hello world"] - - unenroll_data_sources_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:| - assert_equal :unenroll_data_sources, name - assert_kind_of ::Google::Cloud::Bigquery::DataTransfer::V1::UnenrollDataSourcesRequest, request - assert_equal "hello world", request["name"] - assert_equal ["hello world"], request["data_source_ids"] - refute_nil options - end - - Gapic::ServiceStub.stub :new, unenroll_data_sources_client_stub do - # Create client - client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new do |config| - config.credentials = grpc_channel - end - - # Use hash object - client.unenroll_data_sources({ name: name, data_source_ids: data_source_ids }) do |response, operation| - assert_equal grpc_response, response - assert_equal grpc_operation, operation - end - - # Use named arguments - client.unenroll_data_sources name: name, data_source_ids: data_source_ids do |response, operation| - assert_equal grpc_response, response - assert_equal grpc_operation, operation - end - - # Use protobuf object - client.unenroll_data_sources ::Google::Cloud::Bigquery::DataTransfer::V1::UnenrollDataSourcesRequest.new(name: name, data_source_ids: data_source_ids) do |response, operation| - assert_equal grpc_response, response - assert_equal grpc_operation, operation - end - - # Use hash object with options - client.unenroll_data_sources({ name: name, data_source_ids: data_source_ids }, grpc_options) do |response, operation| - assert_equal grpc_response, response - assert_equal grpc_operation, operation - end - - # Use protobuf object with options - client.unenroll_data_sources(::Google::Cloud::Bigquery::DataTransfer::V1::UnenrollDataSourcesRequest.new(name: name, data_source_ids: data_source_ids), grpc_options) do |response, operation| - assert_equal grpc_response, response - assert_equal grpc_operation, operation - end - - # Verify method calls - assert_equal 5, unenroll_data_sources_client_stub.call_rpc_count - end - end - - def test_configure - grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure - - client = block_config = config = nil - dummy_stub = ClientStub.new nil, nil - Gapic::ServiceStub.stub :new, dummy_stub do - client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new do |config| - config.credentials = grpc_channel - end - end - - config = client.configure do |c| - block_config = c - end - - assert_same block_config, config - assert_kind_of ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client::Configuration, config - end -end diff --git a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/test/helper.rb b/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/test/helper.rb deleted file mode 100644 index 48407bca7edb..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data_transfer-v1/test/helper.rb +++ /dev/null @@ -1,25 +0,0 @@ -# frozen_string_literal: true - -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Auto-generated by gapic-generator-ruby. DO NOT EDIT!
-
-require "minitest/autorun"
-require "minitest/focus"
-require "minitest/rg"
-
-require "grpc"
-
-require "ostruct"