Commit bdef62c

[CHANGE ME] Re-generated google-cloud-bigquery-data_transfer to pick up changes in the API or client library generator.
yoshi-automation committed Feb 13, 2020
1 parent ade2581 commit bdef62c
Showing 8 changed files with 174 additions and 279 deletions.
2 changes: 1 addition & 1 deletion google-cloud-bigquery-data_transfer/README.md
@@ -26,7 +26,7 @@ $ gem install google-cloud-bigquery-data_transfer
require "google/cloud/bigquery/data_transfer"

data_transfer_client = Google::Cloud::Bigquery::DataTransfer.new
formatted_parent = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferServiceClient.location_path(project_id, "us-central1")
formatted_parent = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferServiceClient.project_path(project_id)

# Iterate over all results.
data_transfer_client.list_data_sources(formatted_parent).each do |element|
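For reference, a self-contained version of the updated sample. The project ID is a placeholder, and it assumes a project with the BigQuery Data Transfer API enabled:

require "google/cloud/bigquery/data_transfer"

project_id = "my-project" # placeholder

data_transfer_client = Google::Cloud::Bigquery::DataTransfer.new
# The regenerated sample scopes the listing to the project as a whole
# rather than to a specific location.
formatted_parent = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferServiceClient.project_path(project_id)

# The returned paged enumerable fetches further pages lazily as it is iterated.
data_transfer_client.list_data_sources(formatted_parent).each do |element|
  puts element.data_source_id
end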
@@ -27,7 +27,7 @@
project_id = ENV["DATA_TRANSFER_TEST_PROJECT"].freeze

data_transfer_client = Google::Cloud::Bigquery::DataTransfer.new(version: :v1)
formatted_parent = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferServiceClient.location_path(project_id, "us-central1")
formatted_parent = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferServiceClient.project_path(project_id)

# Iterate over all results.
data_transfer_client.list_data_sources(formatted_parent).each do |element|
@@ -49,7 +49,7 @@ module Bigquery
# require "google/cloud/bigquery/data_transfer"
#
# data_transfer_client = Google::Cloud::Bigquery::DataTransfer.new
# formatted_parent = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferServiceClient.location_path(project_id, "us-central1")
# formatted_parent = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferServiceClient.project_path(project_id)
#
# # Iterate over all results.
# data_transfer_client.list_data_sources(formatted_parent).each do |element|
@@ -49,7 +49,7 @@ module DataTransfer
# require "google/cloud/bigquery/data_transfer"
#
# data_transfer_client = Google::Cloud::Bigquery::DataTransfer.new(version: :v1)
# formatted_parent = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferServiceClient.location_path(project_id, "us-central1")
# formatted_parent = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferServiceClient.project_path(project_id)
#
# # Iterate over all results.
# data_transfer_client.list_data_sources(formatted_parent).each do |element|
@@ -144,6 +144,8 @@ def self.location_path project, location
end

# Returns a fully-qualified location_data_source resource name string.
# @deprecated Multi-pattern resource names will have unified creation and parsing helper functions.
# This helper function will be deleted in the next major version.
# @param project [String]
# @param location [String]
# @param data_source [String]
@@ -157,6 +159,8 @@ def self.location_data_source_path project, location, data_source
end

# Returns a fully-qualified location_run resource name string.
# @deprecated Multi-pattern resource names will have unified creation and parsing helper functions.
# This helper function will be deleted in the next major version.
# @param project [String]
# @param location [String]
# @param transfer_config [String]
@@ -172,6 +176,8 @@ def self.location_run_path project, location, transfer_config, run
end

# Returns a fully-qualified location_transfer_config resource name string.
# @deprecated Multi-pattern resource names will have unified creation and parsing helper functions.
# This helper function will be deleted in the next major version.
# @param project [String]
# @param location [String]
# @param transfer_config [String]
@@ -194,6 +200,8 @@ def self.project_path project
end

# Returns a fully-qualified project_data_source resource name string.
# @deprecated Multi-pattern resource names will have unified creation and parsing helper functions.
# This helper function will be deleted in the next major version.
# @param project [String]
# @param data_source [String]
# @return [String]
@@ -205,6 +213,8 @@ def self.project_data_source_path project, data_source
end

# Returns a fully-qualified project_run resource name string.
# @deprecated Multi-pattern resource names will have unified creation and parsing helper functions.
# This helper function will be deleted in the next major version.
# @param project [String]
# @param transfer_config [String]
# @param run [String]
@@ -218,6 +228,8 @@ def self.project_run_path project, transfer_config, run
end

# Returns a fully-qualified project_transfer_config resource name string.
# @deprecated Multi-pattern resource names will have unified creation and parsing helper functions.
# This helper function will be deleted in the next major version.
# @param project [String]
# @param transfer_config [String]
# @return [String]
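To make the deprecation notes above concrete, here is a sketch of what these helpers return. The output strings follow the standard BigQuery Data Transfer resource-name patterns and are an assumption on my part, not generator output:

client_class = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferServiceClient

# Single-pattern helper, not deprecated by this change.
client_class.project_path("my-project")
# => "projects/my-project"

# Multi-pattern helpers, now marked @deprecated in both their project- and
# location-scoped variants and slated for removal in the next major version.
client_class.project_transfer_config_path("my-project", "my-config")
# => "projects/my-project/transferConfigs/my-config"
client_class.location_transfer_config_path("my-project", "us-central1", "my-config")
# => "projects/my-project/locations/us-central1/transferConfigs/my-config"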
@@ -401,6 +413,14 @@ def initialize \
{'parent' => request.parent}
end
)
@start_manual_transfer_runs = Google::Gax.create_api_call(
@data_transfer_service_stub.method(:start_manual_transfer_runs),
defaults["start_manual_transfer_runs"],
exception_transformer: exception_transformer,
params_extractor: proc do |request|
{'parent' => request.parent}
end
)
@get_transfer_run = Google::Gax.create_api_call(
@data_transfer_service_stub.method(:get_transfer_run),
defaults["get_transfer_run"],
@@ -441,14 +461,6 @@ def initialize \
{'name' => request.name}
end
)
@start_manual_transfer_runs = Google::Gax.create_api_call(
@data_transfer_service_stub.method(:start_manual_transfer_runs),
defaults["start_manual_transfer_runs"],
exception_transformer: exception_transformer,
params_extractor: proc do |request|
{'parent' => request.parent}
end
)
end

# Service calls
@@ -881,6 +893,53 @@ def schedule_transfer_runs \
@schedule_transfer_runs.call(req, options, &block)
end

# Start manual transfer runs to be executed now with schedule_time equal to
# current time. The transfer runs can be created for a time range where the
# run_time is between start_time (inclusive) and end_time (exclusive), or for
# a specific run_time.
#
# @param parent [String]
# Transfer configuration name in the form:
# `projects/{project_id}/transferConfigs/{config_id}` or
# `projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}`.
# @param requested_time_range [Google::Cloud::Bigquery::Datatransfer::V1::StartManualTransferRunsRequest::TimeRange | Hash]
# Time range for the transfer runs that should be started.
# A hash of the same form as `Google::Cloud::Bigquery::Datatransfer::V1::StartManualTransferRunsRequest::TimeRange`
# can also be provided.
# @param requested_run_time [Google::Protobuf::Timestamp | Hash]
# Specific run_time for a transfer run to be started. The
# requested_run_time must not be in the future.
# A hash of the same form as `Google::Protobuf::Timestamp`
# can also be provided.
# @param options [Google::Gax::CallOptions]
#   Overrides the default settings for this call, e.g., timeout,
# retries, etc.
# @yield [result, operation] Access the result along with the RPC operation
# @yieldparam result [Google::Cloud::Bigquery::Datatransfer::V1::StartManualTransferRunsResponse]
# @yieldparam operation [GRPC::ActiveCall::Operation]
# @return [Google::Cloud::Bigquery::Datatransfer::V1::StartManualTransferRunsResponse]
# @raise [Google::Gax::GaxError] if the RPC is aborted.
# @example
# require "google/cloud/bigquery/data_transfer"
#
# data_transfer_client = Google::Cloud::Bigquery::DataTransfer.new(version: :v1)
# response = data_transfer_client.start_manual_transfer_runs

def start_manual_transfer_runs \
parent: nil,
requested_time_range: nil,
requested_run_time: nil,
options: nil,
&block
req = {
parent: parent,
requested_time_range: requested_time_range,
requested_run_time: requested_run_time
}.delete_if { |_, v| v.nil? }
req = Google::Gax::to_proto(req, Google::Cloud::Bigquery::Datatransfer::V1::StartManualTransferRunsRequest)
@start_manual_transfer_runs.call(req, options, &block)
end
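The generated @example above calls the method with no arguments; a slightly fuller sketch, assuming a transfer config that already exists (the project and config IDs are hypothetical):

require "google/cloud/bigquery/data_transfer"

client = Google::Cloud::Bigquery::DataTransfer.new(version: :v1)
parent = "projects/my-project/transferConfigs/my-config"

# Request a single backfill run at a specific run_time, which must not be
# in the future; a hash in the shape of Google::Protobuf::Timestamp is
# accepted here.
response = client.start_manual_transfer_runs(
  parent: parent,
  requested_run_time: { seconds: Time.now.to_i - 3600 }
)
response.runs.each { |run| puts run.name }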

# Returns information about the particular transfer run.
#
# @param name [String]
@@ -1107,53 +1166,6 @@ def check_valid_creds \
req = Google::Gax::to_proto(req, Google::Cloud::Bigquery::Datatransfer::V1::CheckValidCredsRequest)
@check_valid_creds.call(req, options, &block)
end

# Start manual transfer runs to be executed now with schedule_time equal to
# current time. The transfer runs can be created for a time range where the
# run_time is between start_time (inclusive) and end_time (exclusive), or for
# a specific run_time.
#
# @param parent [String]
# Transfer configuration name in the form:
# `projects/{project_id}/transferConfigs/{config_id}` or
# `projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}`.
# @param requested_time_range [Google::Cloud::Bigquery::Datatransfer::V1::StartManualTransferRunsRequest::TimeRange | Hash]
# Time range for the transfer runs that should be started.
# A hash of the same form as `Google::Cloud::Bigquery::Datatransfer::V1::StartManualTransferRunsRequest::TimeRange`
# can also be provided.
# @param requested_run_time [Google::Protobuf::Timestamp | Hash]
# Specific run_time for a transfer run to be started. The
# requested_run_time must not be in the future.
# A hash of the same form as `Google::Protobuf::Timestamp`
# can also be provided.
# @param options [Google::Gax::CallOptions]
#   Overrides the default settings for this call, e.g., timeout,
# retries, etc.
# @yield [result, operation] Access the result along with the RPC operation
# @yieldparam result [Google::Cloud::Bigquery::Datatransfer::V1::StartManualTransferRunsResponse]
# @yieldparam operation [GRPC::ActiveCall::Operation]
# @return [Google::Cloud::Bigquery::Datatransfer::V1::StartManualTransferRunsResponse]
# @raise [Google::Gax::GaxError] if the RPC is aborted.
# @example
# require "google/cloud/bigquery/data_transfer"
#
# data_transfer_client = Google::Cloud::Bigquery::DataTransfer.new(version: :v1)
# response = data_transfer_client.start_manual_transfer_runs

def start_manual_transfer_runs \
parent: nil,
requested_time_range: nil,
requested_run_time: nil,
options: nil,
&block
req = {
parent: parent,
requested_time_range: requested_time_range,
requested_run_time: requested_run_time
}.delete_if { |_, v| v.nil? }
req = Google::Gax::to_proto(req, Google::Cloud::Bigquery::Datatransfer::V1::StartManualTransferRunsRequest)
@start_manual_transfer_runs.call(req, options, &block)
end
end
end
end
@@ -21,71 +21,71 @@
},
"methods": {
"GetDataSource": {
"timeout_millis": 20000,
"timeout_millis": 60000,
"retry_codes_name": "idempotent",
"retry_params_name": "default"
},
"ListDataSources": {
"timeout_millis": 20000,
"timeout_millis": 60000,
"retry_codes_name": "idempotent",
"retry_params_name": "default"
},
"CreateTransferConfig": {
"timeout_millis": 30000,
"timeout_millis": 60000,
"retry_codes_name": "non_idempotent",
"retry_params_name": "default"
},
"UpdateTransferConfig": {
"timeout_millis": 30000,
"timeout_millis": 60000,
"retry_codes_name": "non_idempotent",
"retry_params_name": "default"
},
"DeleteTransferConfig": {
"timeout_millis": 30000,
"retry_codes_name": "idempotent",
"timeout_millis": 60000,
"retry_codes_name": "non_idempotent",
"retry_params_name": "default"
},
"GetTransferConfig": {
"timeout_millis": 30000,
"timeout_millis": 60000,
"retry_codes_name": "idempotent",
"retry_params_name": "default"
},
"ListTransferConfigs": {
"timeout_millis": 30000,
"timeout_millis": 60000,
"retry_codes_name": "idempotent",
"retry_params_name": "default"
},
"ScheduleTransferRuns": {
"timeout_millis": 30000,
"timeout_millis": 60000,
"retry_codes_name": "non_idempotent",
"retry_params_name": "default"
},
"StartManualTransferRuns": {
"timeout_millis": 60000,
"retry_codes_name": "non_idempotent",
"retry_params_name": "default"
},
"GetTransferRun": {
"timeout_millis": 30000,
"timeout_millis": 60000,
"retry_codes_name": "idempotent",
"retry_params_name": "default"
},
"DeleteTransferRun": {
"timeout_millis": 30000,
"retry_codes_name": "idempotent",
"timeout_millis": 60000,
"retry_codes_name": "non_idempotent",
"retry_params_name": "default"
},
"ListTransferRuns": {
"timeout_millis": 30000,
"timeout_millis": 60000,
"retry_codes_name": "idempotent",
"retry_params_name": "default"
},
"ListTransferLogs": {
"timeout_millis": 30000,
"timeout_millis": 60000,
"retry_codes_name": "idempotent",
"retry_params_name": "default"
},
"CheckValidCreds": {
"timeout_millis": 30000,
"retry_codes_name": "idempotent",
"retry_params_name": "default"
},
"StartManualTransferRuns": {
"timeout_millis": 60000,
"retry_codes_name": "non_idempotent",
"retry_params_name": "default"
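The regenerated defaults above appear to standardize the per-method timeout at 60 seconds (previously 20–30 seconds for most methods). They can still be overridden for an individual call; a minimal sketch, assuming google-gax's Google::Gax::CallOptions with the timeout expressed in seconds:

require "google/cloud/bigquery/data_transfer"
require "google/gax"

client = Google::Cloud::Bigquery::DataTransfer.new(version: :v1)

# Override the configured default timeout for this one call.
options = Google::Gax::CallOptions.new(timeout: 120)
client.list_data_sources("projects/my-project", options: options).each do |element|
  puts element.data_source_id
end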
159 changes: 21 additions & 138 deletions google-cloud-bigquery-data_transfer/synth.metadata

Large diffs are not rendered by default.
